author Angelos Mouzakitis <a.mouzakitis@virtualopensystems.com> 2023-10-10 14:33:42 +0000
committer Angelos Mouzakitis <a.mouzakitis@virtualopensystems.com> 2023-10-10 14:33:42 +0000
commit af1a266670d040d2f4083ff309d732d648afba2a (patch)
tree 2fc46203448ddcc6f81546d379abfaeb323575e9 /meson/mesonbuild
parent e02cda008591317b1625707ff8e115a4841aa889 (diff)
Add submodule dependency files (HEAD, master)
Change-Id: Iaf8d18082d3991dec7c0ebbea540f092188eb4ec
Diffstat (limited to 'meson/mesonbuild')
-rw-r--r-- meson/mesonbuild/__init__.py | 0
-rw-r--r-- meson/mesonbuild/_pathlib.py | 73
-rw-r--r-- meson/mesonbuild/_typing.py | 120
-rw-r--r-- meson/mesonbuild/arglist.py | 334
-rw-r--r-- meson/mesonbuild/ast/__init__.py | 34
-rw-r--r-- meson/mesonbuild/ast/interpreter.py | 424
-rw-r--r-- meson/mesonbuild/ast/introspection.py | 330
-rw-r--r-- meson/mesonbuild/ast/postprocess.py | 117
-rw-r--r-- meson/mesonbuild/ast/printer.py | 366
-rw-r--r-- meson/mesonbuild/ast/visitor.py | 142
-rw-r--r-- meson/mesonbuild/backend/__init__.py | 0
-rw-r--r-- meson/mesonbuild/backend/backends.py | 1616
-rw-r--r-- meson/mesonbuild/backend/ninjabackend.py | 3352
-rw-r--r-- meson/mesonbuild/backend/vs2010backend.py | 1562
-rw-r--r-- meson/mesonbuild/backend/vs2012backend.py | 38
-rw-r--r-- meson/mesonbuild/backend/vs2013backend.py | 38
-rw-r--r-- meson/mesonbuild/backend/vs2015backend.py | 38
-rw-r--r-- meson/mesonbuild/backend/vs2017backend.py | 52
-rw-r--r-- meson/mesonbuild/backend/vs2019backend.py | 47
-rw-r--r-- meson/mesonbuild/backend/xcodebackend.py | 1708
-rw-r--r-- meson/mesonbuild/build.py | 2686
-rw-r--r-- meson/mesonbuild/cmake/__init__.py | 46
-rw-r--r-- meson/mesonbuild/cmake/client.py | 373
-rw-r--r-- meson/mesonbuild/cmake/common.py | 334
-rw-r--r-- meson/mesonbuild/cmake/data/preload.cmake | 82
-rw-r--r-- meson/mesonbuild/cmake/executor.py | 246
-rw-r--r-- meson/mesonbuild/cmake/fileapi.py | 320
-rw-r--r-- meson/mesonbuild/cmake/generator.py | 134
-rw-r--r-- meson/mesonbuild/cmake/interpreter.py | 1369
-rw-r--r-- meson/mesonbuild/cmake/toolchain.py | 259
-rw-r--r-- meson/mesonbuild/cmake/traceparser.py | 756
-rw-r--r-- meson/mesonbuild/compilers/__init__.py | 250
-rw-r--r-- meson/mesonbuild/compilers/c.py | 714
-rw-r--r-- meson/mesonbuild/compilers/c_function_attributes.py | 132
-rw-r--r-- meson/mesonbuild/compilers/compilers.py | 1294
-rw-r--r-- meson/mesonbuild/compilers/cpp.py | 823
-rw-r--r-- meson/mesonbuild/compilers/cs.py | 150
-rw-r--r-- meson/mesonbuild/compilers/cuda.py | 760
-rw-r--r-- meson/mesonbuild/compilers/cython.py | 79
-rw-r--r-- meson/mesonbuild/compilers/d.py | 906
-rw-r--r-- meson/mesonbuild/compilers/detect.py | 1219
-rw-r--r-- meson/mesonbuild/compilers/fortran.py | 504
-rw-r--r-- meson/mesonbuild/compilers/java.py | 104
-rw-r--r-- meson/mesonbuild/compilers/mixins/__init__.py | 0
-rw-r--r-- meson/mesonbuild/compilers/mixins/arm.py | 190
-rw-r--r-- meson/mesonbuild/compilers/mixins/c2000.py | 124
-rw-r--r-- meson/mesonbuild/compilers/mixins/ccrx.py | 130
-rw-r--r-- meson/mesonbuild/compilers/mixins/clang.py | 162
-rw-r--r-- meson/mesonbuild/compilers/mixins/clike.py | 1267
-rw-r--r-- meson/mesonbuild/compilers/mixins/compcert.py | 131
-rw-r--r-- meson/mesonbuild/compilers/mixins/elbrus.py | 82
-rw-r--r-- meson/mesonbuild/compilers/mixins/emscripten.py | 69
-rw-r--r-- meson/mesonbuild/compilers/mixins/gnu.py | 398
-rw-r--r-- meson/mesonbuild/compilers/mixins/intel.py | 189
-rw-r--r-- meson/mesonbuild/compilers/mixins/islinker.py | 129
-rw-r--r-- meson/mesonbuild/compilers/mixins/pgi.py | 109
-rw-r--r-- meson/mesonbuild/compilers/mixins/visualstudio.py | 428
-rw-r--r-- meson/mesonbuild/compilers/mixins/xc16.py | 127
-rw-r--r-- meson/mesonbuild/compilers/objc.py | 108
-rw-r--r-- meson/mesonbuild/compilers/objcpp.py | 110
-rw-r--r-- meson/mesonbuild/compilers/rust.py | 170
-rw-r--r-- meson/mesonbuild/compilers/swift.py | 127
-rw-r--r-- meson/mesonbuild/compilers/vala.py | 138
-rw-r--r-- meson/mesonbuild/coredata.py | 1228
-rw-r--r-- meson/mesonbuild/dependencies/__init__.py | 275
-rw-r--r-- meson/mesonbuild/dependencies/base.py | 573
-rw-r--r-- meson/mesonbuild/dependencies/boost.py | 1080
-rw-r--r-- meson/mesonbuild/dependencies/cmake.py | 718
-rw-r--r-- meson/mesonbuild/dependencies/coarrays.py | 90
-rw-r--r-- meson/mesonbuild/dependencies/configtool.py | 178
-rw-r--r-- meson/mesonbuild/dependencies/cuda.py | 291
-rw-r--r-- meson/mesonbuild/dependencies/data/CMakeLists.txt | 98
-rw-r--r-- meson/mesonbuild/dependencies/data/CMakeListsLLVM.txt | 95
-rw-r--r-- meson/mesonbuild/dependencies/data/CMakePathInfo.txt | 31
-rw-r--r-- meson/mesonbuild/dependencies/detect.py | 226
-rw-r--r-- meson/mesonbuild/dependencies/dev.py | 595
-rw-r--r-- meson/mesonbuild/dependencies/dub.py | 240
-rw-r--r-- meson/mesonbuild/dependencies/factory.py | 151
-rw-r--r-- meson/mesonbuild/dependencies/framework.py | 123
-rw-r--r-- meson/mesonbuild/dependencies/hdf5.py | 180
-rw-r--r-- meson/mesonbuild/dependencies/misc.py | 623
-rw-r--r-- meson/mesonbuild/dependencies/mpi.py | 236
-rw-r--r-- meson/mesonbuild/dependencies/pkgconfig.py | 503
-rw-r--r-- meson/mesonbuild/dependencies/platform.py | 58
-rw-r--r-- meson/mesonbuild/dependencies/qt.py | 438
-rw-r--r-- meson/mesonbuild/dependencies/scalapack.py | 153
-rw-r--r-- meson/mesonbuild/dependencies/ui.py | 277
-rw-r--r-- meson/mesonbuild/depfile.py | 85
-rw-r--r-- meson/mesonbuild/envconfig.py | 425
-rw-r--r-- meson/mesonbuild/environment.py | 867
-rw-r--r-- meson/mesonbuild/interpreter/__init__.py | 25
-rw-r--r-- meson/mesonbuild/interpreter/compiler.py | 785
-rw-r--r-- meson/mesonbuild/interpreter/dependencyfallbacks.py | 351
-rw-r--r-- meson/mesonbuild/interpreter/interpreter.py | 2794
-rw-r--r-- meson/mesonbuild/interpreter/interpreterobjects.py | 996
-rw-r--r-- meson/mesonbuild/interpreter/kwargs.py | 139
-rw-r--r-- meson/mesonbuild/interpreter/mesonmain.py | 382
-rw-r--r-- meson/mesonbuild/interpreterbase/__init__.py | 122
-rw-r--r-- meson/mesonbuild/interpreterbase/_unholder.py | 39
-rw-r--r-- meson/mesonbuild/interpreterbase/baseobjects.py | 96
-rw-r--r-- meson/mesonbuild/interpreterbase/decorators.py | 650
-rw-r--r-- meson/mesonbuild/interpreterbase/disabler.py | 42
-rw-r--r-- meson/mesonbuild/interpreterbase/exceptions.py | 33
-rw-r--r-- meson/mesonbuild/interpreterbase/helpers.py | 118
-rw-r--r-- meson/mesonbuild/interpreterbase/interpreterbase.py | 959
-rw-r--r-- meson/mesonbuild/linkers/__init__.py | 126
-rw-r--r-- meson/mesonbuild/linkers/detect.py | 216
-rw-r--r-- meson/mesonbuild/linkers/linkers.py | 1433
-rw-r--r-- meson/mesonbuild/mcompile.py | 358
-rw-r--r-- meson/mesonbuild/mconf.py | 334
-rw-r--r-- meson/mesonbuild/mdevenv.py | 78
-rw-r--r-- meson/mesonbuild/mdist.py | 319
-rw-r--r-- meson/mesonbuild/mesondata.py | 394
-rw-r--r-- meson/mesonbuild/mesonlib/__init__.py | 30
-rw-r--r-- meson/mesonbuild/mesonlib/platform.py | 37
-rw-r--r-- meson/mesonbuild/mesonlib/posix.py | 39
-rw-r--r-- meson/mesonbuild/mesonlib/universal.py | 2190
-rw-r--r-- meson/mesonbuild/mesonlib/win32.py | 39
-rw-r--r-- meson/mesonbuild/mesonmain.py | 329
-rw-r--r-- meson/mesonbuild/minit.py | 186
-rw-r--r-- meson/mesonbuild/minstall.py | 721
-rw-r--r-- meson/mesonbuild/mintro.py | 543
-rw-r--r-- meson/mesonbuild/mlog.py | 395
-rw-r--r-- meson/mesonbuild/modules/__init__.py | 212
-rw-r--r-- meson/mesonbuild/modules/cmake.py | 406
-rw-r--r-- meson/mesonbuild/modules/dlang.py | 135
-rw-r--r-- meson/mesonbuild/modules/fs.py | 258
-rw-r--r-- meson/mesonbuild/modules/gnome.py | 1812
-rw-r--r-- meson/mesonbuild/modules/hotdoc.py | 432
-rw-r--r-- meson/mesonbuild/modules/i18n.py | 197
-rw-r--r-- meson/mesonbuild/modules/keyval.py | 72
-rw-r--r-- meson/mesonbuild/modules/modtest.py | 30
-rw-r--r-- meson/mesonbuild/modules/pkgconfig.py | 591
-rw-r--r-- meson/mesonbuild/modules/python.py | 661
-rw-r--r-- meson/mesonbuild/modules/python3.py | 81
-rw-r--r-- meson/mesonbuild/modules/qt.py | 524
-rw-r--r-- meson/mesonbuild/modules/qt4.py | 25
-rw-r--r-- meson/mesonbuild/modules/qt5.py | 25
-rw-r--r-- meson/mesonbuild/modules/qt6.py | 25
-rw-r--r-- meson/mesonbuild/modules/rpm.py | 186
-rw-r--r-- meson/mesonbuild/modules/sourceset.py | 198
-rw-r--r-- meson/mesonbuild/modules/unstable_cuda.py | 350
-rw-r--r-- meson/mesonbuild/modules/unstable_external_project.py | 268
-rw-r--r-- meson/mesonbuild/modules/unstable_icestorm.py | 89
-rw-r--r-- meson/mesonbuild/modules/unstable_rust.py | 227
-rw-r--r-- meson/mesonbuild/modules/unstable_simd.py | 88
-rw-r--r-- meson/mesonbuild/modules/windows.py | 171
-rw-r--r-- meson/mesonbuild/mparser.py | 814
-rw-r--r-- meson/mesonbuild/msetup.py | 282
-rwxr-xr-x meson/mesonbuild/msubprojects.py | 561
-rw-r--r-- meson/mesonbuild/mtest.py | 2011
-rw-r--r-- meson/mesonbuild/munstable_coredata.py | 114
-rw-r--r-- meson/mesonbuild/optinterpreter.py | 234
-rw-r--r-- meson/mesonbuild/programs.py | 386
-rw-r--r-- meson/mesonbuild/rewriter.py | 970
-rw-r--r-- meson/mesonbuild/scripts/__init__.py | 21
-rw-r--r-- meson/mesonbuild/scripts/clangformat.py | 91
-rw-r--r-- meson/mesonbuild/scripts/clangtidy.py | 57
-rw-r--r-- meson/mesonbuild/scripts/cleantrees.py | 44
-rwxr-xr-x meson/mesonbuild/scripts/cmake_run_ctgt.py | 102
-rw-r--r-- meson/mesonbuild/scripts/cmd_or_ps.ps1 | 22
-rw-r--r-- meson/mesonbuild/scripts/coverage.py | 173
-rw-r--r-- meson/mesonbuild/scripts/delwithsuffix.py | 36
-rw-r--r-- meson/mesonbuild/scripts/depfixer.py | 509
-rw-r--r-- meson/mesonbuild/scripts/depscan.py | 201
-rw-r--r-- meson/mesonbuild/scripts/dirchanger.py | 29
-rw-r--r-- meson/mesonbuild/scripts/externalproject.py | 109
-rw-r--r-- meson/mesonbuild/scripts/gettext.py | 125
-rw-r--r-- meson/mesonbuild/scripts/gtkdochelper.py | 295
-rw-r--r-- meson/mesonbuild/scripts/hotdochelper.py | 38
-rw-r--r-- meson/mesonbuild/scripts/meson_exe.py | 125
-rw-r--r-- meson/mesonbuild/scripts/msgfmthelper.py | 37
-rw-r--r-- meson/mesonbuild/scripts/regen_checker.py | 64
-rw-r--r-- meson/mesonbuild/scripts/scanbuild.py | 65
-rw-r--r-- meson/mesonbuild/scripts/symbolextractor.py | 331
-rw-r--r-- meson/mesonbuild/scripts/tags.py | 53
-rw-r--r-- meson/mesonbuild/scripts/uninstall.py | 50
-rw-r--r-- meson/mesonbuild/scripts/vcstagger.py | 44
-rw-r--r-- meson/mesonbuild/scripts/yelphelper.py | 133
-rw-r--r-- meson/mesonbuild/templates/__init__.py | 0
-rw-r--r-- meson/mesonbuild/templates/cpptemplates.py | 185
-rw-r--r-- meson/mesonbuild/templates/cstemplates.py | 134
-rw-r--r-- meson/mesonbuild/templates/ctemplates.py | 166
-rw-r--r-- meson/mesonbuild/templates/cudatemplates.py | 185
-rw-r--r-- meson/mesonbuild/templates/dlangtemplates.py | 143
-rw-r--r-- meson/mesonbuild/templates/fortrantemplates.py | 140
-rw-r--r-- meson/mesonbuild/templates/javatemplates.py | 136
-rw-r--r-- meson/mesonbuild/templates/mesontemplates.py | 75
-rw-r--r-- meson/mesonbuild/templates/objcpptemplates.py | 167
-rw-r--r-- meson/mesonbuild/templates/objctemplates.py | 166
-rw-r--r-- meson/mesonbuild/templates/rusttemplates.py | 113
-rw-r--r-- meson/mesonbuild/templates/samplefactory.py | 40
-rw-r--r-- meson/mesonbuild/templates/sampleimpl.py | 21
-rw-r--r-- meson/mesonbuild/wrap/__init__.py | 59
-rw-r--r-- meson/mesonbuild/wrap/wrap.py | 607
-rw-r--r-- meson/mesonbuild/wrap/wraptool.py | 220
196 files changed, 71000 insertions, 0 deletions
diff --git a/meson/mesonbuild/__init__.py b/meson/mesonbuild/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/meson/mesonbuild/__init__.py
diff --git a/meson/mesonbuild/_pathlib.py b/meson/mesonbuild/_pathlib.py
new file mode 100644
index 000000000..640b5ed21
--- /dev/null
+++ b/meson/mesonbuild/_pathlib.py
@@ -0,0 +1,73 @@
+# Copyright 2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+ This module solely exists to work around a pathlib.resolve bug on
+ certain Windows systems:
+
+ https://github.com/mesonbuild/meson/issues/7295
+ https://bugs.python.org/issue31842
+
+ It should **never** be used directly. Instead, it is automatically
+ used when `import pathlib` is used. This is achieved by messing with
+ `sys.modules['pathlib']` in mesonmain.
+
+ Additionally, the sole purpose of this module is to work around a
+ python bug, so only bugfixes to pathlib functions and classes are
+ allowed here. Finally, this file should be removed once all upstream
+ python bugs are fixed and it is OK to tell our users to "just upgrade
+ python".
+'''
+
+import pathlib
+import os
+import platform
+
+__all__ = [
+ 'PurePath',
+ 'PurePosixPath',
+ 'PureWindowsPath',
+ 'Path',
+]
+
+PurePath = pathlib.PurePath
+PurePosixPath = pathlib.PurePosixPath
+PureWindowsPath = pathlib.PureWindowsPath
+
+# Only patch on platforms where the bug occurs
+if platform.system().lower() in {'windows'}:
+ # Can not directly inherit from pathlib.Path because the __new__
+ # operator of pathlib.Path() returns a {Posix,Windows}Path object.
+ class Path(type(pathlib.Path())):
+ def resolve(self, strict: bool = False) -> 'Path':
+ '''
+ Work around a resolve bug on certain Windows systems:
+
+ https://github.com/mesonbuild/meson/issues/7295
+ https://bugs.python.org/issue31842
+ '''
+
+ try:
+ return super().resolve(strict=strict)
+ except OSError:
+ return Path(os.path.normpath(self))
+else:
+ Path = pathlib.Path
+ PosixPath = pathlib.PosixPath
+ WindowsPath = pathlib.WindowsPath
+
+ __all__ += [
+ 'PosixPath',
+ 'WindowsPath',
+ ]
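
The wrapper above only takes effect because Meson swaps the module object in sys.modules before anything else imports pathlib, as the docstring notes. A minimal hedged sketch of that technique, assuming the vendored mesonbuild package is importable (the real hook lives in mesonmain.py, outside this file's diff):

    # Sketch of the sys.modules swap described in the docstring above.
    import sys
    import mesonbuild._pathlib

    # Replace the stdlib module object so every later `import pathlib`
    # resolves to the patched wrapper instead of the standard library.
    sys.modules['pathlib'] = mesonbuild._pathlib

    import pathlib  # now refers to mesonbuild._pathlib
    # On the affected Windows systems, resolve() falls back to
    # os.path.normpath() instead of raising OSError.
    print(pathlib.Path('.').resolve())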
diff --git a/meson/mesonbuild/_typing.py b/meson/mesonbuild/_typing.py
new file mode 100644
index 000000000..31a6e18b6
--- /dev/null
+++ b/meson/mesonbuild/_typing.py
@@ -0,0 +1,120 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2020 The Meson development team
+# Copyright © 2020-2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Meson specific typing helpers.
+
+Holds typing helper classes, such as the ImmutableProtocol classes
+"""
+
+__all__ = [
+ 'Protocol',
+ 'ImmutableListProtocol'
+]
+
+import typing
+
+# We can change this to typing when we require python 3.8
+from typing_extensions import Protocol
+
+
+T = typing.TypeVar('T')
+
+
+class StringProtocol(Protocol):
+ def __str__(self) -> str: ...
+
+class SizedStringProtocol(Protocol, StringProtocol, typing.Sized):
+ pass
+
+class ImmutableListProtocol(Protocol[T]):
+
+ """A protocol used in cases where a list is returned, but should not be
+ mutated.
+
+ This provides all of the methods of a Sequence, as well as copy(). copy()
+ returns a list, which allows mutation as it's a copy and that's (hopefully)
+ safe.
+
+ One particular case this is important is for cached values, since python is
+ a pass-by-reference language.
+ """
+
+ def __iter__(self) -> typing.Iterator[T]: ...
+
+ @typing.overload
+ def __getitem__(self, index: int) -> T:...
+ @typing.overload
+ def __getitem__(self, index: slice) -> typing.List[T]: ...
+
+ def __contains__(self, item: T) -> bool: ...
+
+ def __reversed__(self) -> typing.Iterator[T]: ...
+
+ def __len__(self) -> int: ...
+
+ def __add__(self, other: typing.List[T]) -> typing.List[T]: ...
+
+ def __eq__(self, other: typing.Any) -> bool: ...
+ def __ne__(self, other: typing.Any) -> bool: ...
+ def __le__(self, other: typing.Any) -> bool: ...
+ def __lt__(self, other: typing.Any) -> bool: ...
+ def __gt__(self, other: typing.Any) -> bool: ...
+ def __ge__(self, other: typing.Any) -> bool: ...
+
+ def count(self, item: T) -> int: ...
+
+ def index(self, item: T) -> int: ...
+
+ def copy(self) -> typing.List[T]: ...
+
+
+class ImmutableSetProtocol(Protocol[T]):
+
+ """A protocol for a set that cannot be mutated.
+
+ This provides for cases where mutation of the set is undesired. Although
+ this will be allowed at runtime, mypy (or another type checker), will see
+ any attempt to use mutative methods as an error.
+ """
+
+ def __iter__(self) -> typing.Iterator[T]: ...
+
+ def __contains__(self, item: T) -> bool: ...
+
+ def __len__(self) -> int: ...
+
+ def __add__(self, other: typing.Set[T]) -> typing.Set[T]: ...
+
+ def __eq__(self, other: typing.Any) -> bool: ...
+ def __ne__(self, other: typing.Any) -> bool: ...
+ def __le__(self, other: typing.Any) -> bool: ...
+ def __lt__(self, other: typing.Any) -> bool: ...
+ def __gt__(self, other: typing.Any) -> bool: ...
+ def __ge__(self, other: typing.Any) -> bool: ...
+
+ def copy(self) -> typing.Set[T]: ...
+
+ def difference(self, other: typing.Set[T]) -> typing.Set[T]: ...
+
+ def intersection(self, other: typing.Set[T]) -> typing.Set[T]: ...
+
+ def issubset(self, other: typing.Set[T]) -> bool: ...
+
+ def issuperset(self, other: typing.Set[T]) -> bool: ...
+
+ def symmetric_difference(self, other: typing.Set[T]) -> typing.Set[T]: ...
+
+ def union(self, other: typing.Set[T]) -> typing.Set[T]: ...
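
For illustration, a hedged sketch of how ImmutableListProtocol is meant to annotate cached values, per its docstring (the ArgCache class is invented for this example and is not part of the diff):

    import typing
    from mesonbuild._typing import ImmutableListProtocol

    class ArgCache:
        def __init__(self) -> None:
            self._cached: typing.List[str] = ['-O2', '-g']

        def get_args(self) -> ImmutableListProtocol[str]:
            # Callers may iterate, index and len() the result, but a type
            # checker rejects get_args().append(...); mutation requires the
            # fresh list returned by copy().
            return self._cached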
diff --git a/meson/mesonbuild/arglist.py b/meson/mesonbuild/arglist.py
new file mode 100644
index 000000000..e150d39ad
--- /dev/null
+++ b/meson/mesonbuild/arglist.py
@@ -0,0 +1,334 @@
+# Copyright 2012-2020 The Meson development team
+# Copyright © 2020 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from functools import lru_cache
+import collections
+import enum
+import os
+import re
+import typing as T
+
+from . import mesonlib
+
+if T.TYPE_CHECKING:
+ from .linkers import StaticLinker
+ from .compilers import Compiler
+
+UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt'] # type: T.List[str]
+# execinfo is a compiler lib on FreeBSD and NetBSD
+if mesonlib.is_freebsd() or mesonlib.is_netbsd():
+ UNIXY_COMPILER_INTERNAL_LIBS.append('execinfo')
+
+
+class Dedup(enum.Enum):
+
+ """What kind of deduplication can be done to compiler args.
+
+ OVERRIDDEN - Whether an argument can be 'overridden' by a later argument.
+ For example, -DFOO defines FOO and -UFOO undefines FOO. In this case,
+ we can safely remove the previous occurrence and add a new one. The
+ same is true for include paths and library paths with -I and -L.
+ UNIQUE - Arguments that once specified cannot be undone, such as `-c` or
+ `-pipe`. New instances of these can be completely skipped.
+ NO_DEDUP - Whether it matters where or how many times on the command-line
+ a particular argument is present. This can matter for symbol
+ resolution in static or shared libraries, so we cannot de-dup or
+ reorder them.
+ """
+
+ NO_DEDUP = 0
+ UNIQUE = 1
+ OVERRIDDEN = 2
+
+
+class CompilerArgs(collections.abc.MutableSequence):
+ '''
+ List-like class that manages a list of compiler arguments. Should be used
+ while constructing compiler arguments from various sources. Can be
+ operated with ordinary lists, so this does not need to be used
+ everywhere.
+
+ All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc)
+ and can be converted to the native type of each compiler by using the
+ .to_native() method to which you must pass an instance of the compiler or
+ the compiler class.
+
+ New arguments added to this class (either with .append(), .extend(), or +=)
+ are added in a way that ensures that they override previous arguments.
+ For example:
+
+ >>> a = ['-Lfoo', '-lbar']
+ >>> a += ['-Lpho', '-lbaz']
+ >>> print(a)
+ ['-Lpho', '-Lfoo', '-lbar', '-lbaz']
+
+ Arguments will also be de-duped if they can be de-duped safely.
+
+ Note that because of all this, this class is not commutative and does not
+ preserve the order of arguments when it is safe not to. For example:
+ >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror']
+ ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror']
+ >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar']
+ ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror']
+
+ '''
+ # Arg prefixes that override by prepending instead of appending
+ prepend_prefixes = () # type: T.Tuple[str, ...]
+
+ # Arg prefixes and args that must be de-duped by returning 2
+ dedup2_prefixes = () # type: T.Tuple[str, ...]
+ dedup2_suffixes = () # type: T.Tuple[str, ...]
+ dedup2_args = () # type: T.Tuple[str, ...]
+
+ # Arg prefixes and args that must be de-duped by returning 1
+ #
+ # NOTE: not thorough. A list of potential corner cases can be found in
+ # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
+ dedup1_prefixes = () # type: T.Tuple[str, ...]
+ dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') # type: T.Tuple[str, ...]
+ # Match a .so of the form path/to/libfoo.so.0.1.0
+ # Only UNIX shared libraries require this. Others have a fixed extension.
+ dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
+ dedup1_args = () # type: T.Tuple[str, ...]
+ # In generate_link() we add external libs without de-dup, but we must
+ # *always* de-dup these because they're special arguments to the linker
+ # TODO: these should probably move too
+ always_dedup_args = tuple('-l' + lib for lib in UNIXY_COMPILER_INTERNAL_LIBS) # type: T.Tuple[str, ...]
+
+ def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'],
+ iterable: T.Optional[T.Iterable[str]] = None):
+ self.compiler = compiler
+ self._container = list(iterable) if iterable is not None else [] # type: T.List[str]
+ self.pre = collections.deque() # type: T.Deque[str]
+ self.post = collections.deque() # type: T.Deque[str]
+
+ # Flush the saved pre and post list into the _container list
+ #
+ # This correctly deduplicates the entries after _can_dedup definition
+ # Note: This function is designed to work without delete operations, as deletions hurt performance significantly.
+ def flush_pre_post(self) -> None:
+ new = list() # type: T.List[str]
+ pre_flush_set = set() # type: T.Set[str]
+ post_flush = collections.deque() # type: T.Deque[str]
+ post_flush_set = set() # type: T.Set[str]
+
+ # The two lists are walked from front to back, so that deduplication does not require any removals
+ for a in self.pre:
+ dedup = self._can_dedup(a)
+ if a not in pre_flush_set:
+ new.append(a)
+ if dedup is Dedup.OVERRIDDEN:
+ pre_flush_set.add(a)
+ for a in reversed(self.post):
+ dedup = self._can_dedup(a)
+ if a not in post_flush_set:
+ post_flush.appendleft(a)
+ if dedup is Dedup.OVERRIDDEN:
+ post_flush_set.add(a)
+
+ # pre and post will overwrite every element that is in the container;
+ # only copy over args that are in _container but not in the pre or post flush sets
+ if pre_flush_set or post_flush_set:
+ for a in self._container:
+ if a not in post_flush_set and a not in pre_flush_set:
+ new.append(a)
+ else:
+ new.extend(self._container)
+ new.extend(post_flush)
+
+ self._container = new
+ self.pre.clear()
+ self.post.clear()
+
+ def __iter__(self) -> T.Iterator[str]:
+ self.flush_pre_post()
+ return iter(self._container)
+
+ @T.overload # noqa: F811
+ def __getitem__(self, index: int) -> str: # noqa: F811
+ pass
+
+ @T.overload # noqa: F811
+ def __getitem__(self, index: slice) -> T.MutableSequence[str]: # noqa: F811
+ pass
+
+ def __getitem__(self, index: T.Union[int, slice]) -> T.Union[str, T.MutableSequence[str]]: # noqa: F811
+ self.flush_pre_post()
+ return self._container[index]
+
+ @T.overload # noqa: F811
+ def __setitem__(self, index: int, value: str) -> None: # noqa: F811
+ pass
+
+ @T.overload # noqa: F811
+ def __setitem__(self, index: slice, value: T.Iterable[str]) -> None: # noqa: F811
+ pass
+
+ def __setitem__(self, index: T.Union[int, slice], value: T.Union[str, T.Iterable[str]]) -> None: # noqa: F811
+ self.flush_pre_post()
+ self._container[index] = value # type: ignore # TODO: fix 'Invalid index type' and 'Incompatible types in assignment' errors
+
+ def __delitem__(self, index: T.Union[int, slice]) -> None:
+ self.flush_pre_post()
+ del self._container[index]
+
+ def __len__(self) -> int:
+ return len(self._container) + len(self.pre) + len(self.post)
+
+ def insert(self, index: int, value: str) -> None:
+ self.flush_pre_post()
+ self._container.insert(index, value)
+
+ def copy(self) -> 'CompilerArgs':
+ self.flush_pre_post()
+ return type(self)(self.compiler, self._container.copy())
+
+ @classmethod
+ @lru_cache(maxsize=None)
+ def _can_dedup(cls, arg: str) -> Dedup:
+ """Returns whether the argument can be safely de-duped.
+
+ In addition to these, we handle library arguments specially.
+ With GNU ld, we surround library arguments with -Wl,--start/end-group
+ to recursively search for symbols in the libraries. This is not needed
+ with other linkers.
+ """
+
+ # A standalone argument must never be deduplicated because it is
+ # defined by what comes _after_ it. Thus dedupping this:
+ # -D FOO -D BAR
+ # would yield either
+ # -D FOO BAR
+ # or
+ # FOO -D BAR
+ # both of which are invalid.
+ if arg in cls.dedup2_prefixes:
+ return Dedup.NO_DEDUP
+ if arg in cls.dedup2_args or \
+ arg.startswith(cls.dedup2_prefixes) or \
+ arg.endswith(cls.dedup2_suffixes):
+ return Dedup.OVERRIDDEN
+ if arg in cls.dedup1_args or \
+ arg.startswith(cls.dedup1_prefixes) or \
+ arg.endswith(cls.dedup1_suffixes) or \
+ re.search(cls.dedup1_regex, arg):
+ return Dedup.UNIQUE
+ return Dedup.NO_DEDUP
+
+ @classmethod
+ @lru_cache(maxsize=None)
+ def _should_prepend(cls, arg: str) -> bool:
+ return arg.startswith(cls.prepend_prefixes)
+
+ def to_native(self, copy: bool = False) -> T.List[str]:
+ # Check if we need to add --start/end-group for circular dependencies
+ # between static libraries, and for recursively searching for symbols
+ # needed by static libraries that are provided by object files or
+ # shared libraries.
+ self.flush_pre_post()
+ if copy:
+ new = self.copy()
+ else:
+ new = self
+ return self.compiler.unix_args_to_native(new._container)
+
+ def append_direct(self, arg: str) -> None:
+ '''
+ Append the specified argument without any reordering or de-dup except
+ for absolute paths to libraries, etc, which can always be de-duped
+ safely.
+ '''
+ self.flush_pre_post()
+ if os.path.isabs(arg):
+ self.append(arg)
+ else:
+ self._container.append(arg)
+
+ def extend_direct(self, iterable: T.Iterable[str]) -> None:
+ '''
+ Extend using the elements in the specified iterable without any
+ reordering or de-dup except for absolute paths where the order of
+ include search directories is not relevant
+ '''
+ self.flush_pre_post()
+ for elem in iterable:
+ self.append_direct(elem)
+
+ def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None:
+ normal_flags = []
+ lflags = []
+ for i in iterable:
+ if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')):
+ lflags.append(i)
+ else:
+ normal_flags.append(i)
+ self.extend(normal_flags)
+ self.extend_direct(lflags)
+
+ def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs':
+ self.flush_pre_post()
+ new = self.copy()
+ new += args
+ return new
+
+ def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
+ '''
+ Add two CompilerArgs while taking into account overriding of arguments
+ and while preserving the order of arguments as much as possible
+ '''
+ tmp_pre = collections.deque() # type: T.Deque[str]
+ if not isinstance(args, collections.abc.Iterable):
+ raise TypeError(f'can only concatenate Iterable[str] (not "{args}") to CompilerArgs')
+ for arg in args:
+ # If the argument can be de-duped, do it either by removing the
+ # previous occurrence of it and adding a new one, or not adding the
+ # new occurrence.
+ dedup = self._can_dedup(arg)
+ if dedup is Dedup.UNIQUE:
+ # Argument already exists and adding a new instance is useless
+ if arg in self._container or arg in self.pre or arg in self.post:
+ continue
+ if self._should_prepend(arg):
+ tmp_pre.appendleft(arg)
+ else:
+ self.post.append(arg)
+ self.pre.extendleft(tmp_pre)
+ # pre and post are merged later, before an iter call
+ return self
+
+ def __radd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
+ self.flush_pre_post()
+ new = type(self)(self.compiler, args)
+ new += self
+ return new
+
+ def __eq__(self, other: object) -> T.Union[bool]:
+ self.flush_pre_post()
+ # Only allow equality checks against other CompilerArgs and list instances
+ if isinstance(other, CompilerArgs):
+ return self.compiler == other.compiler and self._container == other._container
+ elif isinstance(other, list):
+ return self._container == other
+ return NotImplemented
+
+ def append(self, arg: str) -> None:
+ self.__iadd__([arg])
+
+ def extend(self, args: T.Iterable[str]) -> None:
+ self.__iadd__(args)
+
+ def __repr__(self) -> str:
+ self.flush_pre_post()
+ return f'CompilerArgs({self.compiler!r}, {self._container!r})'
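
A hedged usage sketch of the override behaviour documented in the CompilerArgs docstring (the DummyCompiler stub is invented for the example; real callers pass a Compiler or StaticLinker instance):

    from mesonbuild.arglist import CompilerArgs

    class DummyCompiler:
        # Stub of the only method that to_native() relies on.
        def unix_args_to_native(self, args):
            return list(args)

    args = CompilerArgs(DummyCompiler(), ['-I/usr/include', '-DFOO'])
    args += ['-I/custom/include', '-DFOO']
    # The base class defines no prepend or dedup prefixes, so this prints
    # the plain concatenation; concrete compiler subclasses set
    # prepend_prefixes and dedup*_prefixes so that -I arguments are
    # prepended and a repeated -DFOO overrides the earlier one.
    print(args.to_native())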
diff --git a/meson/mesonbuild/ast/__init__.py b/meson/mesonbuild/ast/__init__.py
new file mode 100644
index 000000000..4fb56cb86
--- /dev/null
+++ b/meson/mesonbuild/ast/__init__.py
@@ -0,0 +1,34 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+
+__all__ = [
+ 'AstConditionLevel',
+ 'AstInterpreter',
+ 'AstIDGenerator',
+ 'AstIndentationGenerator',
+ 'AstJSONPrinter',
+ 'AstVisitor',
+ 'AstPrinter',
+ 'IntrospectionInterpreter',
+ 'build_target_functions',
+]
+
+from .interpreter import AstInterpreter
+from .introspection import IntrospectionInterpreter, build_target_functions
+from .visitor import AstVisitor
+from .postprocess import AstConditionLevel, AstIDGenerator, AstIndentationGenerator
+from .printer import AstPrinter, AstJSONPrinter
diff --git a/meson/mesonbuild/ast/interpreter.py b/meson/mesonbuild/ast/interpreter.py
new file mode 100644
index 000000000..19b3a1d21
--- /dev/null
+++ b/meson/mesonbuild/ast/interpreter.py
@@ -0,0 +1,424 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+
+from .visitor import AstVisitor
+from .. import mparser, mesonlib
+from .. import environment
+
+from ..interpreterbase import (
+ MesonInterpreterObject,
+ InterpreterBase,
+ InvalidArguments,
+ BreakRequest,
+ ContinueRequest,
+ default_resolve_key,
+ TYPE_nvar,
+ TYPE_nkwargs,
+)
+
+from ..mparser import (
+ AndNode,
+ ArgumentNode,
+ ArithmeticNode,
+ ArrayNode,
+ AssignmentNode,
+ BaseNode,
+ ComparisonNode,
+ ElementaryNode,
+ EmptyNode,
+ ForeachClauseNode,
+ IdNode,
+ IfClauseNode,
+ IndexNode,
+ MethodNode,
+ NotNode,
+ OrNode,
+ PlusAssignmentNode,
+ TernaryNode,
+ UMinusNode,
+)
+
+import os, sys
+import typing as T
+
+class DontCareObject(MesonInterpreterObject):
+ pass
+
+class MockExecutable(MesonInterpreterObject):
+ pass
+
+class MockStaticLibrary(MesonInterpreterObject):
+ pass
+
+class MockSharedLibrary(MesonInterpreterObject):
+ pass
+
+class MockCustomTarget(MesonInterpreterObject):
+ pass
+
+class MockRunTarget(MesonInterpreterObject):
+ pass
+
+ADD_SOURCE = 0
+REMOVE_SOURCE = 1
+
+_T = T.TypeVar('_T')
+_V = T.TypeVar('_V')
+
+class AstInterpreter(InterpreterBase):
+ def __init__(self, source_root: str, subdir: str, subproject: str, visitors: T.Optional[T.List[AstVisitor]] = None):
+ super().__init__(source_root, subdir, subproject)
+ self.visitors = visitors if visitors is not None else []
+ self.processed_buildfiles = set() # type: T.Set[str]
+ self.assignments = {} # type: T.Dict[str, BaseNode]
+ self.assign_vals = {} # type: T.Dict[str, T.Any]
+ self.reverse_assignment = {} # type: T.Dict[str, BaseNode]
+ self.funcs.update({'project': self.func_do_nothing,
+ 'test': self.func_do_nothing,
+ 'benchmark': self.func_do_nothing,
+ 'install_headers': self.func_do_nothing,
+ 'install_man': self.func_do_nothing,
+ 'install_data': self.func_do_nothing,
+ 'install_subdir': self.func_do_nothing,
+ 'configuration_data': self.func_do_nothing,
+ 'configure_file': self.func_do_nothing,
+ 'find_program': self.func_do_nothing,
+ 'include_directories': self.func_do_nothing,
+ 'add_global_arguments': self.func_do_nothing,
+ 'add_global_link_arguments': self.func_do_nothing,
+ 'add_project_arguments': self.func_do_nothing,
+ 'add_project_link_arguments': self.func_do_nothing,
+ 'message': self.func_do_nothing,
+ 'generator': self.func_do_nothing,
+ 'error': self.func_do_nothing,
+ 'run_command': self.func_do_nothing,
+ 'assert': self.func_do_nothing,
+ 'subproject': self.func_do_nothing,
+ 'dependency': self.func_do_nothing,
+ 'get_option': self.func_do_nothing,
+ 'join_paths': self.func_do_nothing,
+ 'environment': self.func_do_nothing,
+ 'import': self.func_do_nothing,
+ 'vcs_tag': self.func_do_nothing,
+ 'add_languages': self.func_do_nothing,
+ 'declare_dependency': self.func_do_nothing,
+ 'files': self.func_do_nothing,
+ 'executable': self.func_do_nothing,
+ 'static_library': self.func_do_nothing,
+ 'shared_library': self.func_do_nothing,
+ 'library': self.func_do_nothing,
+ 'build_target': self.func_do_nothing,
+ 'custom_target': self.func_do_nothing,
+ 'run_target': self.func_do_nothing,
+ 'subdir': self.func_subdir,
+ 'set_variable': self.func_do_nothing,
+ 'get_variable': self.func_do_nothing,
+ 'is_disabler': self.func_do_nothing,
+ 'is_variable': self.func_do_nothing,
+ 'disabler': self.func_do_nothing,
+ 'gettext': self.func_do_nothing,
+ 'jar': self.func_do_nothing,
+ 'warning': self.func_do_nothing,
+ 'shared_module': self.func_do_nothing,
+ 'option': self.func_do_nothing,
+ 'both_libraries': self.func_do_nothing,
+ 'add_test_setup': self.func_do_nothing,
+ 'find_library': self.func_do_nothing,
+ 'subdir_done': self.func_do_nothing,
+ 'alias_target': self.func_do_nothing,
+ 'summary': self.func_do_nothing,
+ 'range': self.func_do_nothing,
+ })
+
+ def _unholder_args(self, args: _T, kwargs: _V) -> T.Tuple[_T, _V]:
+ return args, kwargs
+
+ def _holderify(self, res: _T) -> _T:
+ return res
+
+ def func_do_nothing(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> bool:
+ return True
+
+ def load_root_meson_file(self) -> None:
+ super().load_root_meson_file()
+ for i in self.visitors:
+ self.ast.accept(i)
+
+ def func_subdir(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None:
+ args = self.flatten_args(args)
+ if len(args) != 1 or not isinstance(args[0], str):
+ sys.stderr.write(f'Unable to evaluate subdir({args}) in AstInterpreter --> Skipping\n')
+ return
+
+ prev_subdir = self.subdir
+ subdir = os.path.join(prev_subdir, args[0])
+ absdir = os.path.join(self.source_root, subdir)
+ buildfilename = os.path.join(subdir, environment.build_filename)
+ absname = os.path.join(self.source_root, buildfilename)
+ symlinkless_dir = os.path.realpath(absdir)
+ build_file = os.path.join(symlinkless_dir, 'meson.build')
+ if build_file in self.processed_buildfiles:
+ sys.stderr.write(f'Trying to enter {args[0]} which has already been visited --> Skipping\n')
+ return
+ self.processed_buildfiles.add(build_file)
+
+ if not os.path.isfile(absname):
+ sys.stderr.write(f'Unable to find build file {buildfilename} --> Skipping\n')
+ return
+ with open(absname, encoding='utf-8') as f:
+ code = f.read()
+ assert(isinstance(code, str))
+ try:
+ codeblock = mparser.Parser(code, absname).parse()
+ except mesonlib.MesonException as me:
+ me.file = absname
+ raise me
+
+ self.subdir = subdir
+ for i in self.visitors:
+ codeblock.accept(i)
+ self.evaluate_codeblock(codeblock)
+ self.subdir = prev_subdir
+
+ def method_call(self, node: BaseNode) -> bool:
+ return True
+
+ def evaluate_fstring(self, node: mparser.FormatStringNode) -> str:
+ assert(isinstance(node, mparser.FormatStringNode))
+ return node.value
+
+ def evaluate_arithmeticstatement(self, cur: ArithmeticNode) -> int:
+ self.evaluate_statement(cur.left)
+ self.evaluate_statement(cur.right)
+ return 0
+
+ def evaluate_uminusstatement(self, cur: UMinusNode) -> int:
+ self.evaluate_statement(cur.value)
+ return 0
+
+ def evaluate_ternary(self, node: TernaryNode) -> None:
+ assert(isinstance(node, TernaryNode))
+ self.evaluate_statement(node.condition)
+ self.evaluate_statement(node.trueblock)
+ self.evaluate_statement(node.falseblock)
+
+ def evaluate_dictstatement(self, node: mparser.DictNode) -> TYPE_nkwargs:
+ def resolve_key(node: mparser.BaseNode) -> str:
+ if isinstance(node, mparser.StringNode):
+ return node.value
+ return '__AST_UNKNOWN__'
+ arguments, kwargs = self.reduce_arguments(node.args, key_resolver=resolve_key)
+ assert (not arguments)
+ self.argument_depth += 1
+ for key, value in kwargs.items():
+ if isinstance(key, BaseNode):
+ self.evaluate_statement(key)
+ self.argument_depth -= 1
+ return {}
+
+ def evaluate_plusassign(self, node: PlusAssignmentNode) -> None:
+ assert(isinstance(node, PlusAssignmentNode))
+ # Cheat by doing a reassignment
+ self.assignments[node.var_name] = node.value # Save a reference to the value node
+ if node.value.ast_id:
+ self.reverse_assignment[node.value.ast_id] = node
+ self.assign_vals[node.var_name] = self.evaluate_statement(node.value)
+
+ def evaluate_indexing(self, node: IndexNode) -> int:
+ return 0
+
+ def unknown_function_called(self, func_name: str) -> None:
+ pass
+
+ def reduce_arguments(
+ self,
+ args: mparser.ArgumentNode,
+ key_resolver: T.Callable[[mparser.BaseNode], str] = default_resolve_key,
+ duplicate_key_error: T.Optional[str] = None,
+ ) -> T.Tuple[T.List[TYPE_nvar], TYPE_nkwargs]:
+ if isinstance(args, ArgumentNode):
+ kwargs = {} # type: T.Dict[str, TYPE_nvar]
+ for key, val in args.kwargs.items():
+ kwargs[key_resolver(key)] = val
+ if args.incorrect_order():
+ raise InvalidArguments('All keyword arguments must be after positional arguments.')
+ return self.flatten_args(args.arguments), kwargs
+ else:
+ return self.flatten_args(args), {}
+
+ def evaluate_comparison(self, node: ComparisonNode) -> bool:
+ self.evaluate_statement(node.left)
+ self.evaluate_statement(node.right)
+ return False
+
+ def evaluate_andstatement(self, cur: AndNode) -> bool:
+ self.evaluate_statement(cur.left)
+ self.evaluate_statement(cur.right)
+ return False
+
+ def evaluate_orstatement(self, cur: OrNode) -> bool:
+ self.evaluate_statement(cur.left)
+ self.evaluate_statement(cur.right)
+ return False
+
+ def evaluate_notstatement(self, cur: NotNode) -> bool:
+ self.evaluate_statement(cur.value)
+ return False
+
+ def evaluate_foreach(self, node: ForeachClauseNode) -> None:
+ try:
+ self.evaluate_codeblock(node.block)
+ except ContinueRequest:
+ pass
+ except BreakRequest:
+ pass
+
+ def evaluate_if(self, node: IfClauseNode) -> None:
+ for i in node.ifs:
+ self.evaluate_codeblock(i.block)
+ if not isinstance(node.elseblock, EmptyNode):
+ self.evaluate_codeblock(node.elseblock)
+
+ def get_variable(self, varname: str) -> int:
+ return 0
+
+ def assignment(self, node: AssignmentNode) -> None:
+ assert(isinstance(node, AssignmentNode))
+ self.assignments[node.var_name] = node.value # Save a reference to the value node
+ if node.value.ast_id:
+ self.reverse_assignment[node.value.ast_id] = node
+ self.assign_vals[node.var_name] = self.evaluate_statement(node.value) # Evaluate the value just in case
+
+ def resolve_node(self, node: BaseNode, include_unknown_args: bool = False, id_loop_detect: T.Optional[T.List[str]] = None) -> T.Optional[T.Any]:
+ def quick_resolve(n: BaseNode, loop_detect: T.Optional[T.List[str]] = None) -> T.Any:
+ if loop_detect is None:
+ loop_detect = []
+ if isinstance(n, IdNode):
+ assert isinstance(n.value, str)
+ if n.value in loop_detect or n.value not in self.assignments:
+ return []
+ return quick_resolve(self.assignments[n.value], loop_detect = loop_detect + [n.value])
+ elif isinstance(n, ElementaryNode):
+ return n.value
+ else:
+ return n
+
+ if id_loop_detect is None:
+ id_loop_detect = []
+ result = None
+
+ if not isinstance(node, BaseNode):
+ return None
+
+ assert node.ast_id
+ if node.ast_id in id_loop_detect:
+ return None # Loop detected
+ id_loop_detect += [node.ast_id]
+
+ # Try to evaluate the value of the node
+ if isinstance(node, IdNode):
+ result = quick_resolve(node)
+
+ elif isinstance(node, ElementaryNode):
+ result = node.value
+
+ elif isinstance(node, NotNode):
+ result = self.resolve_node(node.value, include_unknown_args, id_loop_detect)
+ if isinstance(result, bool):
+ result = not result
+
+ elif isinstance(node, ArrayNode):
+ result = [x for x in node.args.arguments]
+
+ elif isinstance(node, ArgumentNode):
+ result = [x for x in node.arguments]
+
+ elif isinstance(node, ArithmeticNode):
+ if node.operation != 'add':
+ return None # Only handle string and array concats
+ l = quick_resolve(node.left)
+ r = quick_resolve(node.right)
+ if isinstance(l, str) and isinstance(r, str):
+ result = l + r # String concatenation detected
+ else:
+ result = self.flatten_args(l, include_unknown_args, id_loop_detect) + self.flatten_args(r, include_unknown_args, id_loop_detect)
+
+ elif isinstance(node, MethodNode):
+ src = quick_resolve(node.source_object)
+ margs = self.flatten_args(node.args.arguments, include_unknown_args, id_loop_detect)
+ mkwargs = {} # type: T.Dict[str, TYPE_nvar]
+ try:
+ if isinstance(src, str):
+ result = self.string_method_call(src, node.name, margs, mkwargs)
+ elif isinstance(src, bool):
+ result = self.bool_method_call(src, node.name, margs, mkwargs)
+ elif isinstance(src, int):
+ result = self.int_method_call(src, node.name, margs, mkwargs)
+ elif isinstance(src, list):
+ result = self.array_method_call(src, node.name, margs, mkwargs)
+ elif isinstance(src, dict):
+ result = self.dict_method_call(src, node.name, margs, mkwargs)
+ except mesonlib.MesonException:
+ return None
+
+ # Ensure that the result is fully resolved (no more nodes)
+ if isinstance(result, BaseNode):
+ result = self.resolve_node(result, include_unknown_args, id_loop_detect)
+ elif isinstance(result, list):
+ new_res = [] # type: T.List[TYPE_nvar]
+ for i in result:
+ if isinstance(i, BaseNode):
+ resolved = self.resolve_node(i, include_unknown_args, id_loop_detect)
+ if resolved is not None:
+ new_res += self.flatten_args(resolved, include_unknown_args, id_loop_detect)
+ else:
+ new_res += [i]
+ result = new_res
+
+ return result
+
+ def flatten_args(self, args_raw: T.Union[TYPE_nvar, T.Sequence[TYPE_nvar]], include_unknown_args: bool = False, id_loop_detect: T.Optional[T.List[str]] = None) -> T.List[TYPE_nvar]:
+ # Make sure we are always dealing with lists
+ if isinstance(args_raw, list):
+ args = args_raw
+ else:
+ args = [args_raw]
+
+ flattened_args = [] # type: T.List[TYPE_nvar]
+
+ # Resolve the contents of args
+ for i in args:
+ if isinstance(i, BaseNode):
+ resolved = self.resolve_node(i, include_unknown_args, id_loop_detect)
+ if resolved is not None:
+ if not isinstance(resolved, list):
+ resolved = [resolved]
+ flattened_args += resolved
+ elif isinstance(i, (str, bool, int, float)) or include_unknown_args:
+ flattened_args += [i]
+ return flattened_args
+
+ def flatten_kwargs(self, kwargs: T.Dict[str, TYPE_nvar], include_unknown_args: bool = False) -> T.Dict[str, TYPE_nvar]:
+ flattened_kwargs = {}
+ for key, val in kwargs.items():
+ if isinstance(val, BaseNode):
+ resolved = self.resolve_node(val, include_unknown_args)
+ if resolved is not None:
+ flattened_kwargs[key] = resolved
+ elif isinstance(val, (str, bool, int, float)) or include_unknown_args:
+ flattened_kwargs[key] = val
+ return flattened_kwargs
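
The funcs table built in __init__ above is an ordinary dispatch dictionary, so tools can replace individual entries to observe specific calls while everything else stays a no-op. A hedged sketch under that assumption (the subclass, driver calls, and path are invented for the example):

    from mesonbuild.ast.interpreter import AstInterpreter

    class FindProgramLogger(AstInterpreter):
        # Hypothetical subclass that logs every find_program() call.
        def __init__(self, source_root: str) -> None:
            super().__init__(source_root, '', '')
            self.funcs['find_program'] = self.log_find_program

        def log_find_program(self, node, args, kwargs) -> bool:
            print('find_program:', self.flatten_args(args))
            return True

    logger = FindProgramLogger('/path/to/source')
    logger.load_root_meson_file()          # parse meson.build, run visitors
    logger.evaluate_codeblock(logger.ast)  # walk the tree; other funcs no-op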
diff --git a/meson/mesonbuild/ast/introspection.py b/meson/mesonbuild/ast/introspection.py
new file mode 100644
index 000000000..42813db66
--- /dev/null
+++ b/meson/mesonbuild/ast/introspection.py
@@ -0,0 +1,330 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool
+
+from .interpreter import AstInterpreter
+from .visitor import AstVisitor
+from .. import compilers, environment, mesonlib, optinterpreter
+from .. import coredata as cdata
+from ..mesonlib import MachineChoice, OptionKey
+from ..interpreterbase import InvalidArguments, TYPE_nvar
+from ..build import BuildTarget, Executable, Jar, SharedLibrary, SharedModule, StaticLibrary
+from ..mparser import BaseNode, ArithmeticNode, ArrayNode, ElementaryNode, IdNode, FunctionNode, StringNode
+from ..compilers import detect_compiler_for
+import typing as T
+import os
+import argparse
+
+build_target_functions = ['executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries']
+
+class IntrospectionHelper(argparse.Namespace):
+ # mimic an argparse namespace
+ def __init__(self, cross_file: str):
+ super().__init__()
+ self.cross_file = cross_file # type: str
+ self.native_file = None # type: T.Optional[str]
+ self.cmd_line_options = {} # type: T.Dict[str, str]
+
+ def __eq__(self, other: object) -> bool:
+ return NotImplemented
+
+class IntrospectionInterpreter(AstInterpreter):
+ # Interpreter to detect the options without a build directory
+ # Most of the code is stolen from interpreter.Interpreter
+ def __init__(self,
+ source_root: str,
+ subdir: str,
+ backend: str,
+ visitors: T.Optional[T.List[AstVisitor]] = None,
+ cross_file: T.Optional[str] = None,
+ subproject: str = '',
+ subproject_dir: str = 'subprojects',
+ env: T.Optional[environment.Environment] = None):
+ visitors = visitors if visitors is not None else []
+ super().__init__(source_root, subdir, subproject, visitors=visitors)
+
+ options = IntrospectionHelper(cross_file)
+ self.cross_file = cross_file
+ if env is None:
+ self.environment = environment.Environment(source_root, None, options)
+ else:
+ self.environment = env
+ self.subproject_dir = subproject_dir
+ self.coredata = self.environment.get_coredata()
+ self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
+ self.backend = backend
+ self.default_options = {OptionKey('backend'): self.backend}
+ self.project_data = {} # type: T.Dict[str, T.Any]
+ self.targets = [] # type: T.List[T.Dict[str, T.Any]]
+ self.dependencies = [] # type: T.List[T.Dict[str, T.Any]]
+ self.project_node = None # type: T.Optional[BaseNode]
+
+ self.funcs.update({
+ 'add_languages': self.func_add_languages,
+ 'dependency': self.func_dependency,
+ 'executable': self.func_executable,
+ 'jar': self.func_jar,
+ 'library': self.func_library,
+ 'project': self.func_project,
+ 'shared_library': self.func_shared_lib,
+ 'shared_module': self.func_shared_module,
+ 'static_library': self.func_static_lib,
+ 'both_libraries': self.func_both_lib,
+ })
+
+ def func_project(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None:
+ if self.project_node:
+ raise InvalidArguments('Second call to project()')
+ self.project_node = node
+ if len(args) < 1:
+ raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.')
+
+ proj_name = args[0]
+ proj_vers = kwargs.get('version', 'undefined')
+ proj_langs = self.flatten_args(args[1:])
+ if isinstance(proj_vers, ElementaryNode):
+ proj_vers = proj_vers.value
+ if not isinstance(proj_vers, str):
+ proj_vers = 'undefined'
+ self.project_data = {'descriptive_name': proj_name, 'version': proj_vers}
+
+ if os.path.exists(self.option_file):
+ oi = optinterpreter.OptionInterpreter(self.subproject)
+ oi.process(self.option_file)
+ self.coredata.update_project_options(oi.options)
+
+ def_opts = self.flatten_args(kwargs.get('default_options', []))
+ _project_default_options = mesonlib.stringlistify(def_opts)
+ self.project_default_options = cdata.create_options_dict(_project_default_options, self.subproject)
+ self.default_options.update(self.project_default_options)
+ self.coredata.set_default_options(self.default_options, self.subproject, self.environment)
+
+ if not self.is_subproject() and 'subproject_dir' in kwargs:
+ spdirname = kwargs['subproject_dir']
+ if isinstance(spdirname, StringNode):
+ assert isinstance(spdirname.value, str)
+ self.subproject_dir = spdirname.value
+ if not self.is_subproject():
+ self.project_data['subprojects'] = []
+ subprojects_dir = os.path.join(self.source_root, self.subproject_dir)
+ if os.path.isdir(subprojects_dir):
+ for i in os.listdir(subprojects_dir):
+ if os.path.isdir(os.path.join(subprojects_dir, i)):
+ self.do_subproject(i)
+
+ self.coredata.init_backend_options(self.backend)
+ options = {k: v for k, v in self.environment.options.items() if k.is_backend()}
+
+ self.coredata.set_options(options)
+ self._add_languages(proj_langs, MachineChoice.HOST)
+ self._add_languages(proj_langs, MachineChoice.BUILD)
+
+ def do_subproject(self, dirname: str) -> None:
+ subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir)
+ subpr = os.path.join(subproject_dir_abs, dirname)
+ try:
+ subi = IntrospectionInterpreter(subpr, '', self.backend, cross_file=self.cross_file, subproject=dirname, subproject_dir=self.subproject_dir, env=self.environment, visitors=self.visitors)
+ subi.analyze()
+ subi.project_data['name'] = dirname
+ self.project_data['subprojects'] += [subi.project_data]
+ except (mesonlib.MesonException, RuntimeError):
+ return
+
+ def func_add_languages(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None:
+ kwargs = self.flatten_kwargs(kwargs)
+ if 'native' in kwargs:
+ native = kwargs.get('native', False)
+ self._add_languages(args, MachineChoice.BUILD if native else MachineChoice.HOST)
+ else:
+ for for_machine in [MachineChoice.BUILD, MachineChoice.HOST]:
+ self._add_languages(args, for_machine)
+
+ def _add_languages(self, raw_langs: T.List[TYPE_nvar], for_machine: MachineChoice) -> None:
+ langs = [] # type: T.List[str]
+ for l in self.flatten_args(raw_langs):
+ if isinstance(l, str):
+ langs.append(l)
+ elif isinstance(l, StringNode):
+ langs.append(l.value)
+
+ for lang in sorted(langs, key=compilers.sort_clink):
+ lang = lang.lower()
+ if lang not in self.coredata.compilers[for_machine]:
+ detect_compiler_for(self.environment, lang, for_machine)
+
+ def func_dependency(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None:
+ args = self.flatten_args(args)
+ kwargs = self.flatten_kwargs(kwargs)
+ if not args:
+ return
+ name = args[0]
+ has_fallback = 'fallback' in kwargs
+ required = kwargs.get('required', True)
+ version = kwargs.get('version', [])
+ if not isinstance(version, list):
+ version = [version]
+ if isinstance(required, ElementaryNode):
+ required = required.value
+ if not isinstance(required, bool):
+ required = False
+ self.dependencies += [{
+ 'name': name,
+ 'required': required,
+ 'version': version,
+ 'has_fallback': has_fallback,
+ 'conditional': node.condition_level > 0,
+ 'node': node
+ }]
+
+ def build_target(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs_raw: T.Dict[str, TYPE_nvar], targetclass: T.Type[BuildTarget]) -> T.Optional[T.Dict[str, T.Any]]:
+ args = self.flatten_args(args)
+ if not args or not isinstance(args[0], str):
+ return None
+ name = args[0]
+ srcqueue = [node]
+ extra_queue = []
+
+ # Process the sources BEFORE flattening the kwargs, to preserve the original nodes
+ if 'sources' in kwargs_raw:
+ srcqueue += mesonlib.listify(kwargs_raw['sources'])
+
+ if 'extra_files' in kwargs_raw:
+ extra_queue += mesonlib.listify(kwargs_raw['extra_files'])
+
+ kwargs = self.flatten_kwargs(kwargs_raw, True)
+
+ def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]:
+ res = [] # type: T.List[BaseNode]
+ while inqueue:
+ curr = inqueue.pop(0)
+ arg_node = None
+ assert(isinstance(curr, BaseNode))
+ if isinstance(curr, FunctionNode):
+ arg_node = curr.args
+ elif isinstance(curr, ArrayNode):
+ arg_node = curr.args
+ elif isinstance(curr, IdNode):
+ # Try to resolve the ID and append the node to the queue
+ assert isinstance(curr.value, str)
+ var_name = curr.value
+ if var_name in self.assignments:
+ tmp_node = self.assignments[var_name]
+ if isinstance(tmp_node, (ArrayNode, IdNode, FunctionNode)):
+ inqueue += [tmp_node]
+ elif isinstance(curr, ArithmeticNode):
+ inqueue += [curr.left, curr.right]
+ if arg_node is None:
+ continue
+ arg_nodes = arg_node.arguments.copy()
+ # Pop the first element if the function is a build target function
+ if isinstance(curr, FunctionNode) and curr.func_name in build_target_functions:
+ arg_nodes.pop(0)
+ elementary_nodes = [x for x in arg_nodes if isinstance(x, (str, StringNode))]
+ inqueue += [x for x in arg_nodes if isinstance(x, (FunctionNode, ArrayNode, IdNode, ArithmeticNode))]
+ if elementary_nodes:
+ res += [curr]
+ return res
+
+ source_nodes = traverse_nodes(srcqueue)
+ extraf_nodes = traverse_nodes(extra_queue)
+
+ # Make sure nothing can crash when creating the build class
+ kwargs_reduced = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs and k in ['install', 'build_by_default', 'build_always']}
+ kwargs_reduced = {k: v.value if isinstance(v, ElementaryNode) else v for k, v in kwargs_reduced.items()}
+ kwargs_reduced = {k: v for k, v in kwargs_reduced.items() if not isinstance(v, BaseNode)}
+ for_machine = MachineChoice.HOST
+ objects = [] # type: T.List[T.Any]
+ empty_sources = [] # type: T.List[T.Any]
+ # Passing the unresolved sources list causes errors
+ target = targetclass(name, self.subdir, self.subproject, for_machine, empty_sources, objects, self.environment, kwargs_reduced)
+
+ new_target = {
+ 'name': target.get_basename(),
+ 'id': target.get_id(),
+ 'type': target.get_typename(),
+ 'defined_in': os.path.normpath(os.path.join(self.source_root, self.subdir, environment.build_filename)),
+ 'subdir': self.subdir,
+ 'build_by_default': target.build_by_default,
+ 'installed': target.should_install(),
+ 'outputs': target.get_outputs(),
+ 'sources': source_nodes,
+ 'extra_files': extraf_nodes,
+ 'kwargs': kwargs,
+ 'node': node,
+ }
+
+ self.targets += [new_target]
+ return new_target
+
+ def build_library(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]:
+ default_library = self.coredata.get_option(OptionKey('default_library'))
+ if default_library == 'shared':
+ return self.build_target(node, args, kwargs, SharedLibrary)
+ elif default_library == 'static':
+ return self.build_target(node, args, kwargs, StaticLibrary)
+ elif default_library == 'both':
+ return self.build_target(node, args, kwargs, SharedLibrary)
+ return None
+
+ def func_executable(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]:
+ return self.build_target(node, args, kwargs, Executable)
+
+ def func_static_lib(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]:
+ return self.build_target(node, args, kwargs, StaticLibrary)
+
+ def func_shared_lib(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]:
+ return self.build_target(node, args, kwargs, SharedLibrary)
+
+ def func_both_lib(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]:
+ return self.build_target(node, args, kwargs, SharedLibrary)
+
+ def func_shared_module(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]:
+ return self.build_target(node, args, kwargs, SharedModule)
+
+ def func_library(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]:
+ return self.build_library(node, args, kwargs)
+
+ def func_jar(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]:
+ return self.build_target(node, args, kwargs, Jar)
+
+ def func_build_target(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]:
+ if 'target_type' not in kwargs:
+ return None
+ target_type = kwargs.pop('target_type')
+ if isinstance(target_type, ElementaryNode):
+ target_type = target_type.value
+ if target_type == 'executable':
+ return self.build_target(node, args, kwargs, Executable)
+ elif target_type == 'shared_library':
+ return self.build_target(node, args, kwargs, SharedLibrary)
+ elif target_type == 'static_library':
+ return self.build_target(node, args, kwargs, StaticLibrary)
+ elif target_type == 'both_libraries':
+ return self.build_target(node, args, kwargs, SharedLibrary)
+ elif target_type == 'library':
+ return self.build_library(node, args, kwargs)
+ elif target_type == 'jar':
+ return self.build_target(node, args, kwargs, Jar)
+ return None
+
+ def is_subproject(self) -> bool:
+ return self.subproject != ''
+
+ def analyze(self) -> None:
+ self.load_root_meson_file()
+ self.sanity_check_ast()
+ self.parse_project()
+ self.run()
diff --git a/meson/mesonbuild/ast/postprocess.py b/meson/mesonbuild/ast/postprocess.py
new file mode 100644
index 000000000..6d808be57
--- /dev/null
+++ b/meson/mesonbuild/ast/postprocess.py
@@ -0,0 +1,117 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains AST visitors that post-process a parsed Meson AST,
+# annotating its nodes with extra information for later passes
+
+from . import AstVisitor
+from .. import mparser
+import typing as T
+
+class AstIndentationGenerator(AstVisitor):
+ def __init__(self) -> None:
+ self.level = 0
+
+ def visit_default_func(self, node: mparser.BaseNode) -> None:
+ # Store the current level in the node
+ node.level = self.level
+
+ def visit_ArrayNode(self, node: mparser.ArrayNode) -> None:
+ self.visit_default_func(node)
+ self.level += 1
+ node.args.accept(self)
+ self.level -= 1
+
+ def visit_DictNode(self, node: mparser.DictNode) -> None:
+ self.visit_default_func(node)
+ self.level += 1
+ node.args.accept(self)
+ self.level -= 1
+
+ def visit_MethodNode(self, node: mparser.MethodNode) -> None:
+ self.visit_default_func(node)
+ node.source_object.accept(self)
+ self.level += 1
+ node.args.accept(self)
+ self.level -= 1
+
+ def visit_FunctionNode(self, node: mparser.FunctionNode) -> None:
+ self.visit_default_func(node)
+ self.level += 1
+ node.args.accept(self)
+ self.level -= 1
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None:
+ self.visit_default_func(node)
+ self.level += 1
+ node.items.accept(self)
+ node.block.accept(self)
+ self.level -= 1
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None:
+ self.visit_default_func(node)
+ for i in node.ifs:
+ i.accept(self)
+ if node.elseblock:
+ self.level += 1
+ node.elseblock.accept(self)
+ self.level -= 1
+
+ def visit_IfNode(self, node: mparser.IfNode) -> None:
+ self.visit_default_func(node)
+ self.level += 1
+ node.condition.accept(self)
+ node.block.accept(self)
+ self.level -= 1
+
+class AstIDGenerator(AstVisitor):
+ def __init__(self) -> None:
+ self.counter = {} # type: T.Dict[str, int]
+
+ def visit_default_func(self, node: mparser.BaseNode) -> None:
+ name = type(node).__name__
+ if name not in self.counter:
+ self.counter[name] = 0
+ node.ast_id = name + '#' + str(self.counter[name])
+ self.counter[name] += 1
+
+class AstConditionLevel(AstVisitor):
+ def __init__(self) -> None:
+ self.condition_level = 0
+
+ def visit_default_func(self, node: mparser.BaseNode) -> None:
+ node.condition_level = self.condition_level
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None:
+ self.visit_default_func(node)
+ self.condition_level += 1
+ node.items.accept(self)
+ node.block.accept(self)
+ self.condition_level -= 1
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None:
+ self.visit_default_func(node)
+ for i in node.ifs:
+ i.accept(self)
+ if node.elseblock:
+ self.condition_level += 1
+ node.elseblock.accept(self)
+ self.condition_level -= 1
+
+ def visit_IfNode(self, node: mparser.IfNode) -> None:
+ self.visit_default_func(node)
+ self.condition_level += 1
+ node.condition.accept(self)
+ node.block.accept(self)
+ self.condition_level -= 1
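+
+# A minimal usage sketch for these post-processing visitors (illustrative
+# only, assuming `codeblock` is a parsed mparser.CodeBlockNode): each
+# visitor annotates the nodes in place via the visitor protocol.
+#
+#   for visitor in [AstIndentationGenerator(), AstIDGenerator(), AstConditionLevel()]:
+#       codeblock.accept(visitor)
+#
+# Afterwards every node carries `level`, `ast_id` (e.g. 'FunctionNode#0')
+# and `condition_level` attributes.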
diff --git a/meson/mesonbuild/ast/printer.py b/meson/mesonbuild/ast/printer.py
new file mode 100644
index 000000000..f18544983
--- /dev/null
+++ b/meson/mesonbuild/ast/printer.py
@@ -0,0 +1,366 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains AST visitors that pretty-print a Meson AST back to
+# source text or serialize it into a JSON-friendly structure
+
+from .. import mparser
+from . import AstVisitor
+import re
+import typing as T
+
+arithmetic_map = {
+ 'add': '+',
+ 'sub': '-',
+ 'mod': '%',
+ 'mul': '*',
+ 'div': '/'
+}
+
+class AstPrinter(AstVisitor):
+ def __init__(self, indent: int = 2, arg_newline_cutoff: int = 5):
+ self.result = ''
+ self.indent = indent
+ self.arg_newline_cutoff = arg_newline_cutoff
+ self.ci = ''
+ self.is_newline = True
+ self.last_level = 0
+
+ def post_process(self) -> None:
+ self.result = re.sub(r'\s+\n', '\n', self.result)
+
+ def append(self, data: str, node: mparser.BaseNode) -> None:
+ self.last_level = node.level
+ if self.is_newline:
+ self.result += ' ' * (node.level * self.indent)
+ self.result += data
+ self.is_newline = False
+
+ def append_padded(self, data: str, node: mparser.BaseNode) -> None:
+ if self.result and self.result[-1] not in [' ', '\n']:
+ data = ' ' + data
+ self.append(data + ' ', node)
+
+ def newline(self) -> None:
+ self.result += '\n'
+ self.is_newline = True
+
+ def visit_BooleanNode(self, node: mparser.BooleanNode) -> None:
+ self.append('true' if node.value else 'false', node)
+
+ def visit_IdNode(self, node: mparser.IdNode) -> None:
+ assert isinstance(node.value, str)
+ self.append(node.value, node)
+
+ def visit_NumberNode(self, node: mparser.NumberNode) -> None:
+ self.append(str(node.value), node)
+
+ def visit_StringNode(self, node: mparser.StringNode) -> None:
+ assert isinstance(node.value, str)
+ self.append("'" + node.value + "'", node)
+
+ def visit_FormatStringNode(self, node: mparser.FormatStringNode) -> None:
+ assert isinstance(node.value, str)
+ self.append("f'" + node.value + "'", node)
+
+ def visit_ContinueNode(self, node: mparser.ContinueNode) -> None:
+ self.append('continue', node)
+
+ def visit_BreakNode(self, node: mparser.BreakNode) -> None:
+ self.append('break', node)
+
+ def visit_ArrayNode(self, node: mparser.ArrayNode) -> None:
+ self.append('[', node)
+ node.args.accept(self)
+ self.append(']', node)
+
+ def visit_DictNode(self, node: mparser.DictNode) -> None:
+ self.append('{', node)
+ node.args.accept(self)
+ self.append('}', node)
+
+ def visit_OrNode(self, node: mparser.OrNode) -> None:
+ node.left.accept(self)
+ self.append_padded('or', node)
+ node.right.accept(self)
+
+ def visit_AndNode(self, node: mparser.AndNode) -> None:
+ node.left.accept(self)
+ self.append_padded('and', node)
+ node.right.accept(self)
+
+ def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None:
+ node.left.accept(self)
+ self.append_padded(node.ctype, node)
+ node.right.accept(self)
+
+ def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None:
+ node.left.accept(self)
+ self.append_padded(arithmetic_map[node.operation], node)
+ node.right.accept(self)
+
+ def visit_NotNode(self, node: mparser.NotNode) -> None:
+ self.append_padded('not', node)
+ node.value.accept(self)
+
+ def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None:
+ for i in node.lines:
+ i.accept(self)
+ self.newline()
+
+ def visit_IndexNode(self, node: mparser.IndexNode) -> None:
+ node.iobject.accept(self)
+ self.append('[', node)
+ node.index.accept(self)
+ self.append(']', node)
+
+ def visit_MethodNode(self, node: mparser.MethodNode) -> None:
+ node.source_object.accept(self)
+ self.append('.' + node.name + '(', node)
+ node.args.accept(self)
+ self.append(')', node)
+
+ def visit_FunctionNode(self, node: mparser.FunctionNode) -> None:
+ self.append(node.func_name + '(', node)
+ node.args.accept(self)
+ self.append(')', node)
+
+ def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None:
+ self.append(node.var_name + ' = ', node)
+ node.value.accept(self)
+
+ def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None:
+ self.append(node.var_name + ' += ', node)
+ node.value.accept(self)
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None:
+ varnames = list(node.varnames)
+ self.append_padded('foreach', node)
+ self.append_padded(', '.join(varnames), node)
+ self.append_padded(':', node)
+ node.items.accept(self)
+ self.newline()
+ node.block.accept(self)
+ self.append('endforeach', node)
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None:
+ prefix = ''
+ for i in node.ifs:
+ self.append_padded(prefix + 'if', node)
+ prefix = 'el'
+ i.accept(self)
+ if not isinstance(node.elseblock, mparser.EmptyNode):
+ self.append('else', node)
+ node.elseblock.accept(self)
+ self.append('endif', node)
+
+ def visit_UMinusNode(self, node: mparser.UMinusNode) -> None:
+ self.append_padded('-', node)
+ node.value.accept(self)
+
+ def visit_IfNode(self, node: mparser.IfNode) -> None:
+ node.condition.accept(self)
+ self.newline()
+ node.block.accept(self)
+
+ def visit_TernaryNode(self, node: mparser.TernaryNode) -> None:
+ node.condition.accept(self)
+ self.append_padded('?', node)
+ node.trueblock.accept(self)
+ self.append_padded(':', node)
+ node.falseblock.accept(self)
+
+ def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None:
+ break_args = (len(node.arguments) + len(node.kwargs)) > self.arg_newline_cutoff
+ for i in node.arguments + list(node.kwargs.values()):
+ if not isinstance(i, (mparser.ElementaryNode, mparser.IndexNode)):
+ break_args = True
+ if break_args:
+ self.newline()
+ for i in node.arguments:
+ i.accept(self)
+ self.append(', ', node)
+ if break_args:
+ self.newline()
+ for key, val in node.kwargs.items():
+ key.accept(self)
+ self.append_padded(':', node)
+ val.accept(self)
+ self.append(', ', node)
+ if break_args:
+ self.newline()
+ if break_args:
+ self.result = re.sub(r', \n$', '\n', self.result)
+ else:
+ self.result = re.sub(r', $', '', self.result)
+
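+# A minimal usage sketch (illustrative only, assuming `codeblock` is a
+# parsed mparser.CodeBlockNode):
+#
+#   printer = AstPrinter(indent=2, arg_newline_cutoff=5)
+#   codeblock.accept(printer)
+#   printer.post_process()
+#   print(printer.result)  # the regenerated meson.build text
+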
+class AstJSONPrinter(AstVisitor):
+ def __init__(self) -> None:
+ self.result = {} # type: T.Dict[str, T.Any]
+ self.current = self.result
+
+ def _accept(self, key: str, node: mparser.BaseNode) -> None:
+ old = self.current
+ data = {} # type: T.Dict[str, T.Any]
+ self.current = data
+ node.accept(self)
+ self.current = old
+ self.current[key] = data
+
+ def _accept_list(self, key: str, nodes: T.Sequence[mparser.BaseNode]) -> None:
+ old = self.current
+ datalist = [] # type: T.List[T.Dict[str, T.Any]]
+ for i in nodes:
+ self.current = {}
+ i.accept(self)
+ datalist += [self.current]
+ self.current = old
+ self.current[key] = datalist
+
+ def _raw_accept(self, node: mparser.BaseNode, data: T.Dict[str, T.Any]) -> None:
+ old = self.current
+ self.current = data
+ node.accept(self)
+ self.current = old
+
+ def setbase(self, node: mparser.BaseNode) -> None:
+ self.current['node'] = type(node).__name__
+ self.current['lineno'] = node.lineno
+ self.current['colno'] = node.colno
+ self.current['end_lineno'] = node.end_lineno
+ self.current['end_colno'] = node.end_colno
+
+ def visit_default_func(self, node: mparser.BaseNode) -> None:
+ self.setbase(node)
+
+ def gen_ElementaryNode(self, node: mparser.ElementaryNode) -> None:
+ self.current['value'] = node.value
+ self.setbase(node)
+
+ def visit_BooleanNode(self, node: mparser.BooleanNode) -> None:
+ self.gen_ElementaryNode(node)
+
+ def visit_IdNode(self, node: mparser.IdNode) -> None:
+ self.gen_ElementaryNode(node)
+
+ def visit_NumberNode(self, node: mparser.NumberNode) -> None:
+ self.gen_ElementaryNode(node)
+
+ def visit_StringNode(self, node: mparser.StringNode) -> None:
+ self.gen_ElementaryNode(node)
+
+ def visit_FormatStringNode(self, node: mparser.FormatStringNode) -> None:
+ self.gen_ElementaryNode(node)
+
+ def visit_ArrayNode(self, node: mparser.ArrayNode) -> None:
+ self._accept('args', node.args)
+ self.setbase(node)
+
+ def visit_DictNode(self, node: mparser.DictNode) -> None:
+ self._accept('args', node.args)
+ self.setbase(node)
+
+ def visit_OrNode(self, node: mparser.OrNode) -> None:
+ self._accept('left', node.left)
+ self._accept('right', node.right)
+ self.setbase(node)
+
+ def visit_AndNode(self, node: mparser.AndNode) -> None:
+ self._accept('left', node.left)
+ self._accept('right', node.right)
+ self.setbase(node)
+
+ def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None:
+ self._accept('left', node.left)
+ self._accept('right', node.right)
+ self.current['ctype'] = node.ctype
+ self.setbase(node)
+
+ def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None:
+ self._accept('left', node.left)
+ self._accept('right', node.right)
+ self.current['op'] = arithmetic_map[node.operation]
+ self.setbase(node)
+
+ def visit_NotNode(self, node: mparser.NotNode) -> None:
+ self._accept('right', node.value)
+ self.setbase(node)
+
+ def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None:
+ self._accept_list('lines', node.lines)
+ self.setbase(node)
+
+ def visit_IndexNode(self, node: mparser.IndexNode) -> None:
+ self._accept('object', node.iobject)
+ self._accept('index', node.index)
+ self.setbase(node)
+
+ def visit_MethodNode(self, node: mparser.MethodNode) -> None:
+ self._accept('object', node.source_object)
+ self._accept('args', node.args)
+ self.current['name'] = node.name
+ self.setbase(node)
+
+ def visit_FunctionNode(self, node: mparser.FunctionNode) -> None:
+ self._accept('args', node.args)
+ self.current['name'] = node.func_name
+ self.setbase(node)
+
+ def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None:
+ self._accept('value', node.value)
+ self.current['var_name'] = node.var_name
+ self.setbase(node)
+
+ def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None:
+ self._accept('value', node.value)
+ self.current['var_name'] = node.var_name
+ self.setbase(node)
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None:
+ self._accept('items', node.items)
+ self._accept('block', node.block)
+ self.current['varnames'] = node.varnames
+ self.setbase(node)
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None:
+ self._accept_list('ifs', node.ifs)
+ self._accept('else', node.elseblock)
+ self.setbase(node)
+
+ def visit_UMinusNode(self, node: mparser.UMinusNode) -> None:
+ self._accept('right', node.value)
+ self.setbase(node)
+
+ def visit_IfNode(self, node: mparser.IfNode) -> None:
+ self._accept('condition', node.condition)
+ self._accept('block', node.block)
+ self.setbase(node)
+
+ def visit_TernaryNode(self, node: mparser.TernaryNode) -> None:
+ self._accept('condition', node.condition)
+ self._accept('true', node.trueblock)
+ self._accept('false', node.falseblock)
+ self.setbase(node)
+
+ def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None:
+ self._accept_list('positional', node.arguments)
+ kwargs_list = [] # type: T.List[T.Dict[str, T.Dict[str, T.Any]]]
+ for key, val in node.kwargs.items():
+ key_res = {} # type: T.Dict[str, T.Any]
+ val_res = {} # type: T.Dict[str, T.Any]
+ self._raw_accept(key, key_res)
+ self._raw_accept(val, val_res)
+ kwargs_list += [{'key': key_res, 'val': val_res}]
+ self.current['kwargs'] = kwargs_list
+ self.setbase(node)
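+
+# For the expression `a = 1`, the resulting structure looks roughly like
+# this (illustrative only):
+#
+#   {'node': 'CodeBlockNode', 'lineno': 1, 'colno': 0, ...,
+#    'lines': [{'node': 'AssignmentNode', 'var_name': 'a',
+#               'value': {'node': 'NumberNode', 'value': 1, ...}}]}
+#
+# which can be passed directly to json.dumps().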
diff --git a/meson/mesonbuild/ast/visitor.py b/meson/mesonbuild/ast/visitor.py
new file mode 100644
index 000000000..34a76a8db
--- /dev/null
+++ b/meson/mesonbuild/ast/visitor.py
@@ -0,0 +1,142 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the base class for walking a Meson AST; subclasses
+# override individual visit_* methods to act on specific node types
+
+from .. import mparser
+
+class AstVisitor:
+ def __init__(self) -> None:
+ pass
+
+ def visit_default_func(self, node: mparser.BaseNode) -> None:
+ pass
+
+ def visit_BooleanNode(self, node: mparser.BooleanNode) -> None:
+ self.visit_default_func(node)
+
+ def visit_IdNode(self, node: mparser.IdNode) -> None:
+ self.visit_default_func(node)
+
+ def visit_NumberNode(self, node: mparser.NumberNode) -> None:
+ self.visit_default_func(node)
+
+ def visit_StringNode(self, node: mparser.StringNode) -> None:
+ self.visit_default_func(node)
+
+ def visit_FormatStringNode(self, node: mparser.FormatStringNode) -> None:
+ self.visit_default_func(node)
+
+ def visit_ContinueNode(self, node: mparser.ContinueNode) -> None:
+ self.visit_default_func(node)
+
+ def visit_BreakNode(self, node: mparser.BreakNode) -> None:
+ self.visit_default_func(node)
+
+ def visit_ArrayNode(self, node: mparser.ArrayNode) -> None:
+ self.visit_default_func(node)
+ node.args.accept(self)
+
+ def visit_DictNode(self, node: mparser.DictNode) -> None:
+ self.visit_default_func(node)
+ node.args.accept(self)
+
+ def visit_EmptyNode(self, node: mparser.EmptyNode) -> None:
+ self.visit_default_func(node)
+
+ def visit_OrNode(self, node: mparser.OrNode) -> None:
+ self.visit_default_func(node)
+ node.left.accept(self)
+ node.right.accept(self)
+
+ def visit_AndNode(self, node: mparser.AndNode) -> None:
+ self.visit_default_func(node)
+ node.left.accept(self)
+ node.right.accept(self)
+
+ def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None:
+ self.visit_default_func(node)
+ node.left.accept(self)
+ node.right.accept(self)
+
+ def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None:
+ self.visit_default_func(node)
+ node.left.accept(self)
+ node.right.accept(self)
+
+ def visit_NotNode(self, node: mparser.NotNode) -> None:
+ self.visit_default_func(node)
+ node.value.accept(self)
+
+ def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None:
+ self.visit_default_func(node)
+ for i in node.lines:
+ i.accept(self)
+
+ def visit_IndexNode(self, node: mparser.IndexNode) -> None:
+ self.visit_default_func(node)
+ node.iobject.accept(self)
+ node.index.accept(self)
+
+ def visit_MethodNode(self, node: mparser.MethodNode) -> None:
+ self.visit_default_func(node)
+ node.source_object.accept(self)
+ node.args.accept(self)
+
+ def visit_FunctionNode(self, node: mparser.FunctionNode) -> None:
+ self.visit_default_func(node)
+ node.args.accept(self)
+
+ def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None:
+ self.visit_default_func(node)
+ node.value.accept(self)
+
+ def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None:
+ self.visit_default_func(node)
+ node.value.accept(self)
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None:
+ self.visit_default_func(node)
+ node.items.accept(self)
+ node.block.accept(self)
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None:
+ self.visit_default_func(node)
+ for i in node.ifs:
+ i.accept(self)
+ node.elseblock.accept(self)
+
+ def visit_UMinusNode(self, node: mparser.UMinusNode) -> None:
+ self.visit_default_func(node)
+ node.value.accept(self)
+
+ def visit_IfNode(self, node: mparser.IfNode) -> None:
+ self.visit_default_func(node)
+ node.condition.accept(self)
+ node.block.accept(self)
+
+ def visit_TernaryNode(self, node: mparser.TernaryNode) -> None:
+ self.visit_default_func(node)
+ node.condition.accept(self)
+ node.trueblock.accept(self)
+ node.falseblock.accept(self)
+
+ def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None:
+ self.visit_default_func(node)
+ for i in node.arguments:
+ i.accept(self)
+ for key, val in node.kwargs.items():
+ key.accept(self)
+ val.accept(self)
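+
+# A minimal subclass sketch (illustrative only) that collects every
+# function call name while walking the tree:
+#
+#   class FunctionCollector(AstVisitor):
+#       def __init__(self) -> None:
+#           self.names = []
+#       def visit_FunctionNode(self, node: mparser.FunctionNode) -> None:
+#           self.names.append(node.func_name)
+#           node.args.accept(self)  # keep walking the arguments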
diff --git a/meson/mesonbuild/backend/__init__.py b/meson/mesonbuild/backend/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/meson/mesonbuild/backend/__init__.py
diff --git a/meson/mesonbuild/backend/backends.py b/meson/mesonbuild/backend/backends.py
new file mode 100644
index 000000000..aa8e844a7
--- /dev/null
+++ b/meson/mesonbuild/backend/backends.py
@@ -0,0 +1,1616 @@
+# Copyright 2012-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict
+from functools import lru_cache
+from itertools import chain
+from pathlib import Path
+import enum
+import json
+import os
+import pickle
+import re
+import typing as T
+import hashlib
+
+from .. import build
+from .. import dependencies
+from .. import programs
+from .. import mesonlib
+from .. import mlog
+from ..compilers import LANGUAGES_USING_LDFLAGS, detect
+from ..mesonlib import (
+ File, MachineChoice, MesonException, OptionType, OrderedSet, OptionOverrideProxy,
+ classify_unity_sources, OptionKey, join_args
+)
+
+if T.TYPE_CHECKING:
+ from .._typing import ImmutableListProtocol
+ from ..arglist import CompilerArgs
+ from ..compilers import Compiler
+ from ..interpreter import Interpreter, Test
+ from ..mesonlib import FileMode
+
+# Languages that can mix with C or C++ but don't support unity builds yet
+# because the syntax we use for unity builds is specific to C/C++/ObjC/ObjC++.
+# Assembly files cannot be unitified and neither can LLVM IR files.
+LANGS_CANT_UNITY = ('d', 'fortran', 'vala')
+
+class RegenInfo:
+ def __init__(self, source_dir, build_dir, depfiles):
+ self.source_dir = source_dir
+ self.build_dir = build_dir
+ self.depfiles = depfiles
+
+class TestProtocol(enum.Enum):
+
+ EXITCODE = 0
+ TAP = 1
+ GTEST = 2
+ RUST = 3
+
+ @classmethod
+ def from_str(cls, string: str) -> 'TestProtocol':
+ if string == 'exitcode':
+ return cls.EXITCODE
+ elif string == 'tap':
+ return cls.TAP
+ elif string == 'gtest':
+ return cls.GTEST
+ elif string == 'rust':
+ return cls.RUST
+ raise MesonException(f'unknown test format {string}')
+
+ def __str__(self) -> str:
+ cls = type(self)
+ if self is cls.EXITCODE:
+ return 'exitcode'
+ elif self is cls.GTEST:
+ return 'gtest'
+ elif self is cls.RUST:
+ return 'rust'
+ return 'tap'
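+
+# from_str() and __str__() are inverses of each other (illustrative sketch):
+#
+#   TestProtocol.from_str('gtest')  # -> TestProtocol.GTEST
+#   str(TestProtocol.GTEST)         # -> 'gtest'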
+
+
+class CleanTrees:
+ '''
+ Directories output by custom targets that have to be cleaned manually
+ because on Linux `ninja clean` only deletes empty directories.
+ '''
+ def __init__(self, build_dir, trees):
+ self.build_dir = build_dir
+ self.trees = trees
+
+class InstallData:
+ def __init__(self, source_dir: str, build_dir: str, prefix: str,
+ strip_bin: T.List[str], install_umask: T.Union[str, int],
+ mesonintrospect: T.List[str], version: str):
+ # TODO: in Python 3.8 or with typing_extensions, install_umask could be
+ # `T.Union[T.Literal['preserve'], int]`, which would be more accurate.
+ self.source_dir = source_dir
+ self.build_dir = build_dir
+ self.prefix = prefix
+ self.strip_bin = strip_bin
+ self.install_umask = install_umask
+ self.targets: T.List[TargetInstallData] = []
+ self.headers: T.List[InstallDataBase] = []
+ self.man: T.List[InstallDataBase] = []
+ self.data: T.List[InstallDataBase] = []
+ self.install_scripts: T.List[ExecutableSerialisation] = []
+ self.install_subdirs: T.List[SubdirInstallData] = []
+ self.mesonintrospect = mesonintrospect
+ self.version = version
+
+class TargetInstallData:
+ def __init__(self, fname: str, outdir: str, aliases: T.Dict[str, str], strip: bool,
+ install_name_mappings: T.Dict, rpath_dirs_to_remove: T.Set[bytes],
+ install_rpath: str, install_mode: 'FileMode', subproject: str, optional: bool = False):
+ self.fname = fname
+ self.outdir = outdir
+ self.aliases = aliases
+ self.strip = strip
+ self.install_name_mappings = install_name_mappings
+ self.rpath_dirs_to_remove = rpath_dirs_to_remove
+ self.install_rpath = install_rpath
+ self.install_mode = install_mode
+ self.subproject = subproject
+ self.optional = optional
+
+class InstallDataBase:
+ def __init__(self, path: str, install_path: str, install_mode: 'FileMode', subproject: str):
+ self.path = path
+ self.install_path = install_path
+ self.install_mode = install_mode
+ self.subproject = subproject
+
+class SubdirInstallData(InstallDataBase):
+ def __init__(self, path: str, install_path: str, install_mode: 'FileMode', exclude, subproject: str):
+ super().__init__(path, install_path, install_mode, subproject)
+ self.exclude = exclude
+
+class ExecutableSerialisation:
+ def __init__(self, cmd_args, env: T.Optional[build.EnvironmentVariables] = None, exe_wrapper=None,
+ workdir=None, extra_paths=None, capture=None, feed=None) -> None:
+ self.cmd_args = cmd_args
+ self.env = env
+ if exe_wrapper is not None:
+ assert isinstance(exe_wrapper, programs.ExternalProgram)
+ self.exe_runner = exe_wrapper
+ self.workdir = workdir
+ self.extra_paths = extra_paths
+ self.capture = capture
+ self.feed = feed
+ self.pickled = False
+ self.skip_if_destdir = False
+ self.verbose = False
+ self.subproject = ''
+
+class TestSerialisation:
+ def __init__(self, name: str, project: str, suite: T.List[str], fname: T.List[str],
+ is_cross_built: bool, exe_wrapper: T.Optional[programs.ExternalProgram],
+ needs_exe_wrapper: bool, is_parallel: bool, cmd_args: T.List[str],
+ env: build.EnvironmentVariables, should_fail: bool,
+ timeout: T.Optional[int], workdir: T.Optional[str],
+ extra_paths: T.List[str], protocol: TestProtocol, priority: int,
+ cmd_is_built: bool, depends: T.List[str], version: str):
+ self.name = name
+ self.project_name = project
+ self.suite = suite
+ self.fname = fname
+ self.is_cross_built = is_cross_built
+ if exe_wrapper is not None:
+ assert isinstance(exe_wrapper, programs.ExternalProgram)
+ self.exe_runner = exe_wrapper
+ self.is_parallel = is_parallel
+ self.cmd_args = cmd_args
+ self.env = env
+ self.should_fail = should_fail
+ self.timeout = timeout
+ self.workdir = workdir
+ self.extra_paths = extra_paths
+ self.protocol = protocol
+ self.priority = priority
+ self.needs_exe_wrapper = needs_exe_wrapper
+ self.cmd_is_built = cmd_is_built
+ self.depends = depends
+ self.version = version
+
+
+def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional['Interpreter'] = None) -> T.Optional['Backend']:
+ if backend == 'ninja':
+ from . import ninjabackend
+ return ninjabackend.NinjaBackend(build, interpreter)
+ elif backend == 'vs':
+ from . import vs2010backend
+ return vs2010backend.autodetect_vs_version(build, interpreter)
+ elif backend == 'vs2010':
+ from . import vs2010backend
+ return vs2010backend.Vs2010Backend(build, interpreter)
+ elif backend == 'vs2012':
+ from . import vs2012backend
+ return vs2012backend.Vs2012Backend(build, interpreter)
+ elif backend == 'vs2013':
+ from . import vs2013backend
+ return vs2013backend.Vs2013Backend(build, interpreter)
+ elif backend == 'vs2015':
+ from . import vs2015backend
+ return vs2015backend.Vs2015Backend(build, interpreter)
+ elif backend == 'vs2017':
+ from . import vs2017backend
+ return vs2017backend.Vs2017Backend(build, interpreter)
+ elif backend == 'vs2019':
+ from . import vs2019backend
+ return vs2019backend.Vs2019Backend(build, interpreter)
+ elif backend == 'xcode':
+ from . import xcodebackend
+ return xcodebackend.XCodeBackend(build, interpreter)
+ return None
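+
+# A minimal usage sketch (illustrative only, assuming configured `build`
+# and `interpreter` objects):
+#
+#   backend = get_backend_from_name('ninja', build, interpreter)
+#   if backend is None:
+#       raise MesonException('unknown backend')
+#   backend.generate()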
+
+# This class contains the basic functionality that is needed by all backends.
+# Feel free to move stuff in and out of it as you see fit.
+class Backend:
+ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional['Interpreter']):
+ # Make it possible to construct a dummy backend
+ # This is used for introspection without a build directory
+ if build is None:
+ self.environment = None
+ return
+ self.build = build
+ self.interpreter = interpreter
+ self.environment = build.environment
+ self.processed_targets: T.Set[str] = set()
+ self.name = '<UNKNOWN>'
+ self.build_dir = self.environment.get_build_dir()
+ self.source_dir = self.environment.get_source_dir()
+ self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(),
+ self.environment.get_build_dir())
+ self.src_to_build = mesonlib.relpath(self.environment.get_build_dir(),
+ self.environment.get_source_dir())
+
+ def generate(self) -> None:
+ raise RuntimeError(f'generate is not implemented in {type(self).__name__}')
+
+ def get_target_filename(self, t: T.Union[build.Target, build.CustomTargetIndex], *, warn_multi_output: bool = True):
+ if isinstance(t, build.CustomTarget):
+ if warn_multi_output and len(t.get_outputs()) != 1:
+ mlog.warning(f'custom_target {t.name!r} has more than one output! '
+ 'Using the first one.')
+ filename = t.get_outputs()[0]
+ elif isinstance(t, build.CustomTargetIndex):
+ filename = t.get_outputs()[0]
+ else:
+ assert isinstance(t, build.BuildTarget)
+ filename = t.get_filename()
+ return os.path.join(self.get_target_dir(t), filename)
+
+ def get_target_filename_abs(self, target: T.Union[build.Target, build.CustomTargetIndex]) -> str:
+ return os.path.join(self.environment.get_build_dir(), self.get_target_filename(target))
+
+ def get_base_options_for_target(self, target: build.BuildTarget) -> OptionOverrideProxy:
+ return OptionOverrideProxy(target.option_overrides_base,
+ {k: v for k, v in self.environment.coredata.options.items()
+ if k.type in {OptionType.BASE, OptionType.BUILTIN}})
+
+ def get_compiler_options_for_target(self, target: build.BuildTarget) -> OptionOverrideProxy:
+ comp_reg = {k: v for k, v in self.environment.coredata.options.items() if k.is_compiler()}
+ comp_override = target.option_overrides_compiler
+ return OptionOverrideProxy(comp_override, comp_reg)
+
+ def get_option_for_target(self, option_name: 'OptionKey', target: build.BuildTarget):
+ if option_name in target.option_overrides_base:
+ override = target.option_overrides_base[option_name]
+ return self.environment.coredata.validate_option_value(option_name, override)
+ return self.environment.coredata.get_option(option_name.evolve(subproject=target.subproject))
+
+ def get_source_dir_include_args(self, target, compiler, *, absolute_path=False):
+ curdir = target.get_subdir()
+ if absolute_path:
+ lead = self.source_dir
+ else:
+ lead = self.build_to_src
+ tmppath = os.path.normpath(os.path.join(lead, curdir))
+ return compiler.get_include_args(tmppath, False)
+
+ def get_build_dir_include_args(self, target, compiler, *, absolute_path=False):
+ if absolute_path:
+ curdir = os.path.join(self.build_dir, target.get_subdir())
+ else:
+ curdir = target.get_subdir()
+ if curdir == '':
+ curdir = '.'
+ return compiler.get_include_args(curdir, False)
+
+ def get_target_filename_for_linking(self, target):
+ # On some platforms (msvc for instance), the file that is used for
+ # dynamic linking is not the same as the dynamic library itself. This
+ # file is called an import library, and we want to link against that.
+ # On all other platforms, we link to the library directly.
+ if isinstance(target, build.SharedLibrary):
+ link_lib = target.get_import_filename() or target.get_filename()
+ return os.path.join(self.get_target_dir(target), link_lib)
+ elif isinstance(target, build.StaticLibrary):
+ return os.path.join(self.get_target_dir(target), target.get_filename())
+ elif isinstance(target, (build.CustomTarget, build.CustomTargetIndex)):
+ if not target.is_linkable_target():
+ raise MesonException(f'Tried to link against custom target "{target.name}", which is not linkable.')
+ return os.path.join(self.get_target_dir(target), target.get_filename())
+ elif isinstance(target, build.Executable):
+ if target.import_filename:
+ return os.path.join(self.get_target_dir(target), target.get_import_filename())
+ else:
+ return None
+ raise AssertionError(f'BUG: Tried to link to {target!r} which is not linkable')
+
+ @lru_cache(maxsize=None)
+ def get_target_dir(self, target: build.Target) -> str:
+ if self.environment.coredata.get_option(OptionKey('layout')) == 'mirror':
+ dirname = target.get_subdir()
+ else:
+ dirname = 'meson-out'
+ return dirname
+
+ def get_target_dir_relative_to(self, t, o):
+ '''Get a target dir relative to another target's directory'''
+ target_dir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(t))
+ othert_dir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(o))
+ return os.path.relpath(target_dir, othert_dir)
+
+ def get_target_source_dir(self, target):
+ # if target dir is empty, avoid extraneous trailing / from os.path.join()
+ target_dir = self.get_target_dir(target)
+ if target_dir:
+ return os.path.join(self.build_to_src, target_dir)
+ return self.build_to_src
+
+ def get_target_private_dir(self, target: build.Target) -> str:
+ return os.path.join(self.get_target_filename(target, warn_multi_output=False) + '.p')
+
+ def get_target_private_dir_abs(self, target):
+ return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target))
+
+ @lru_cache(maxsize=None)
+ def get_target_generated_dir(self, target, gensrc, src):
+ """
+ Takes a BuildTarget, a generator source (CustomTarget or GeneratedList),
+ and a generated source filename.
+ Returns the full path of the generated source relative to the build root
+ """
+ # CustomTarget generators output to the build dir of the CustomTarget
+ if isinstance(gensrc, (build.CustomTarget, build.CustomTargetIndex)):
+ return os.path.join(self.get_target_dir(gensrc), src)
+ # GeneratedList generators output to the private build directory of the
+ # target that the GeneratedList is used in
+ return os.path.join(self.get_target_private_dir(target), src)
+
+ def get_unity_source_file(self, target, suffix, number):
+ # There is a potential conflict here, but it is unlikely that
+ # anyone both enables unity builds and has a file called foo-unity.cpp.
+ osrc = f'{target.name}-unity{number}.{suffix}'
+ return mesonlib.File.from_built_file(self.get_target_private_dir(target), osrc)
+
+ def generate_unity_files(self, target, unity_src):
+ abs_files = []
+ result = []
+ compsrcs = classify_unity_sources(target.compilers.values(), unity_src)
+ unity_size = self.get_option_for_target(OptionKey('unity_size'), target)
+
+ def init_language_file(suffix, unity_file_number):
+ unity_src = self.get_unity_source_file(target, suffix, unity_file_number)
+ outfileabs = unity_src.absolute_path(self.environment.get_source_dir(),
+ self.environment.get_build_dir())
+ outfileabs_tmp = outfileabs + '.tmp'
+ abs_files.append(outfileabs)
+ outfileabs_tmp_dir = os.path.dirname(outfileabs_tmp)
+ if not os.path.exists(outfileabs_tmp_dir):
+ os.makedirs(outfileabs_tmp_dir)
+ result.append(unity_src)
+ return open(outfileabs_tmp, 'w', encoding='utf-8')
+
+ # For each language, generate unity source files and return the list
+ for comp, srcs in compsrcs.items():
+ files_in_current = unity_size + 1
+ unity_file_number = 0
+ ofile = None
+ for src in srcs:
+ if files_in_current >= unity_size:
+ if ofile:
+ ofile.close()
+ ofile = init_language_file(comp.get_default_suffix(), unity_file_number)
+ unity_file_number += 1
+ files_in_current = 0
+ ofile.write(f'#include<{src}>\n')
+ files_in_current += 1
+ if ofile:
+ ofile.close()
+
+ for x in abs_files:
+     mesonlib.replace_if_different(x, x + '.tmp')
+ return result
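+
+ # Each generated unity file is just a list of include directives, e.g.
+ # (illustrative only) a file mytarget-unity0.cpp containing:
+ #   #include<foo.cpp>
+ #   #include<bar.cpp>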
+
+ def relpath(self, todir, fromdir):
+ return os.path.relpath(os.path.join('dummyprefixdir', todir),
+ os.path.join('dummyprefixdir', fromdir))
+
+ def flatten_object_list(self, target, proj_dir_to_build_root=''):
+ obj_list = self._flatten_object_list(target, target.get_objects(), proj_dir_to_build_root)
+ return list(dict.fromkeys(obj_list))
+
+ def _flatten_object_list(self, target, objects, proj_dir_to_build_root):
+ obj_list = []
+ for obj in objects:
+ if isinstance(obj, str):
+ o = os.path.join(proj_dir_to_build_root,
+ self.build_to_src, target.get_subdir(), obj)
+ obj_list.append(o)
+ elif isinstance(obj, mesonlib.File):
+ if obj.is_built:
+ o = os.path.join(proj_dir_to_build_root,
+ obj.rel_to_builddir(self.build_to_src))
+ obj_list.append(o)
+ else:
+ o = os.path.join(proj_dir_to_build_root,
+ self.build_to_src)
+ obj_list.append(obj.rel_to_builddir(o))
+ elif isinstance(obj, build.ExtractedObjects):
+ if obj.recursive:
+ obj_list += self._flatten_object_list(obj.target, obj.objlist, proj_dir_to_build_root)
+ obj_list += self.determine_ext_objs(obj, proj_dir_to_build_root)
+ else:
+ raise MesonException('Unknown data type in object list.')
+ return obj_list
+
+ def is_swift_target(self, target):
+ for s in target.sources:
+ if s.endswith('swift'):
+ return True
+ return False
+
+ def determine_swift_dep_dirs(self, target):
+ result = []
+ for l in target.link_targets:
+ result.append(self.get_target_private_dir_abs(l))
+ return result
+
+ def get_executable_serialisation(self, cmd, workdir=None,
+ extra_bdeps=None, capture=None, feed=None,
+ env: T.Optional[build.EnvironmentVariables] = None):
+ exe = cmd[0]
+ cmd_args = cmd[1:]
+ if isinstance(exe, programs.ExternalProgram):
+ exe_cmd = exe.get_command()
+ exe_for_machine = exe.for_machine
+ elif isinstance(exe, build.BuildTarget):
+ exe_cmd = [self.get_target_filename_abs(exe)]
+ exe_for_machine = exe.for_machine
+ elif isinstance(exe, build.CustomTarget):
+ # The output of a custom target may or may not be directly runnable:
+ # it can be a script, a native binary, or a cross-compiled binary that
+ # needs an exe wrapper. This implementation is not exhaustive, but it
+ # works in the common cases.
+ exe_cmd = [self.get_target_filename_abs(exe)]
+ exe_for_machine = MachineChoice.BUILD
+ elif isinstance(exe, mesonlib.File):
+ exe_cmd = [exe.rel_to_builddir(self.environment.source_dir)]
+ exe_for_machine = MachineChoice.BUILD
+ else:
+ exe_cmd = [exe]
+ exe_for_machine = MachineChoice.BUILD
+
+ machine = self.environment.machines[exe_for_machine]
+ if machine.is_windows() or machine.is_cygwin():
+ extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps or [])
+ else:
+ extra_paths = []
+
+ is_cross_built = not self.environment.machines.matches_build_machine(exe_for_machine)
+ if is_cross_built and self.environment.need_exe_wrapper():
+ exe_wrapper = self.environment.get_exe_wrapper()
+ if not exe_wrapper or not exe_wrapper.found():
+ msg = 'An exe_wrapper is needed but was not found. Please define one ' \
+ 'in the cross file and check the command and/or add it to PATH.'
+ raise MesonException(msg)
+ else:
+ if exe_cmd[0].endswith('.jar'):
+ exe_cmd = ['java', '-jar'] + exe_cmd
+ elif exe_cmd[0].endswith('.exe') and not (mesonlib.is_windows() or mesonlib.is_cygwin() or mesonlib.is_wsl()):
+ exe_cmd = ['mono'] + exe_cmd
+ exe_wrapper = None
+
+ workdir = workdir or self.environment.get_build_dir()
+ return ExecutableSerialisation(exe_cmd + cmd_args, env,
+ exe_wrapper, workdir,
+ extra_paths, capture, feed)
+
+ def as_meson_exe_cmdline(self, tname, exe, cmd_args, workdir=None,
+ extra_bdeps=None, capture=None, feed=None,
+ force_serialize=False,
+ env: T.Optional[build.EnvironmentVariables] = None,
+ verbose: bool = False):
+ '''
+ Serialize an executable for running with a generator or a custom target
+ '''
+ cmd = [exe] + cmd_args
+ es = self.get_executable_serialisation(cmd, workdir, extra_bdeps, capture, feed, env)
+ es.verbose = verbose
+ reasons = []
+ if es.extra_paths:
+ reasons.append('to set PATH')
+
+ if es.exe_runner:
+ reasons.append('to use exe_wrapper')
+
+ if workdir:
+ reasons.append('to set workdir')
+
+ if any('\n' in c for c in es.cmd_args):
+ reasons.append('because command contains newlines')
+
+ if es.env and es.env.varnames:
+ reasons.append('to set env')
+
+ force_serialize = force_serialize or bool(reasons)
+
+ if capture:
+ reasons.append('to capture output')
+ if feed:
+ reasons.append('to feed input')
+
+ if not force_serialize:
+ if not capture and not feed:
+ return es.cmd_args, ''
+ args = []
+ if capture:
+ args += ['--capture', capture]
+ if feed:
+ args += ['--feed', feed]
+ return ((self.environment.get_build_command() +
+ ['--internal', 'exe'] + args + ['--'] + es.cmd_args),
+ ', '.join(reasons))
+
+ if isinstance(exe, (programs.ExternalProgram,
+ build.BuildTarget, build.CustomTarget)):
+ basename = exe.name
+ elif isinstance(exe, mesonlib.File):
+ basename = os.path.basename(exe.fname)
+ else:
+ basename = os.path.basename(exe)
+
+ # Can't just use exe.name here; it will likely be run more than once
+ # Take a digest of the cmd args, env, workdir, capture, and feed. This
+ # avoids collisions and also makes the name deterministic over
+ # regenerations which avoids a rebuild by Ninja because the cmdline
+ # stays the same.
+ data = bytes(str(es.env) + str(es.cmd_args) + str(es.workdir) + str(capture) + str(feed),
+ encoding='utf-8')
+ digest = hashlib.sha1(data).hexdigest()
+ scratch_file = f'meson_exe_{basename}_{digest}.dat'
+ exe_data = os.path.join(self.environment.get_scratch_dir(), scratch_file)
+ with open(exe_data, 'wb') as f:
+ pickle.dump(es, f)
+ return (self.environment.get_build_command() + ['--internal', 'exe', '--unpickle', exe_data],
+ ', '.join(reasons))
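+
+ # The serialized invocation then looks like (illustrative only):
+ #   <build command> --internal exe --unpickle .../meson_exe_<basename>_<sha1>.dat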
+
+ def serialize_tests(self):
+ test_data = os.path.join(self.environment.get_scratch_dir(), 'meson_test_setup.dat')
+ with open(test_data, 'wb') as datafile:
+ self.write_test_file(datafile)
+ benchmark_data = os.path.join(self.environment.get_scratch_dir(), 'meson_benchmark_setup.dat')
+ with open(benchmark_data, 'wb') as datafile:
+ self.write_benchmark_file(datafile)
+ return test_data, benchmark_data
+
+ def determine_linker_and_stdlib_args(self, target):
+ '''
+ If we're building a static library, there is only one static linker.
+ Otherwise, we query the target for the dynamic linker.
+ '''
+ if isinstance(target, build.StaticLibrary):
+ return self.build.static_linker[target.for_machine], []
+ return target.get_clink_dynamic_linker_and_stdlibs()
+
+ @staticmethod
+ def _libdir_is_system(libdir, compilers, env):
+ libdir = os.path.normpath(libdir)
+ for cc in compilers.values():
+ if libdir in cc.get_library_dirs(env):
+ return True
+ return False
+
+ def get_external_rpath_dirs(self, target):
+ dirs = set()
+ args = []
+ for lang in LANGUAGES_USING_LDFLAGS:
+ try:
+ args.extend(self.environment.coredata.get_external_link_args(target.for_machine, lang))
+ except Exception:
+ pass
+ # Match rpath formats:
+ # -Wl,-rpath=
+ # -Wl,-rpath,
+ rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)')
+ # Match solaris style compat runpath formats:
+ # -Wl,-R
+ # -Wl,-R,
+ runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)')
+ # Match symbols formats:
+ # -Wl,--just-symbols=
+ # -Wl,--just-symbols,
+ symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)')
+ for arg in args:
+ rpath_match = rpath_regex.match(arg)
+ if rpath_match:
+ for dir in rpath_match.group(1).split(':'):
+ dirs.add(dir)
+ runpath_match = runpath_regex.match(arg)
+ if runpath_match:
+ for dir in runpath_match.group(1).split(':'):
+ # -R can point at either a runpath or a symbols file; treat it as
+ # an rpath only when the path is a directory
+ if Path(dir).is_dir():
+ dirs.add(dir)
+ symbols_match = symbols_regex.match(arg)
+ if symbols_match:
+ for dir in symbols_match.group(1).split(':'):
+ # Prevent usage of --just-symbols to specify rpath
+ if Path(dir).is_dir():
+ raise MesonException(f'Invalid arg for --just-symbols, {dir} is a directory.')
+ return dirs
+
+ def rpaths_for_bundled_shared_libraries(self, target, exclude_system=True):
+ paths = []
+ for dep in target.external_deps:
+ if not isinstance(dep, (dependencies.ExternalLibrary, dependencies.PkgConfigDependency)):
+ continue
+ la = dep.link_args
+ if len(la) != 1 or not os.path.isabs(la[0]):
+ continue
+ # The only link argument is an absolute path to a library file.
+ libpath = la[0]
+ libdir = os.path.dirname(libpath)
+ if exclude_system and self._libdir_is_system(libdir, target.compilers, self.environment):
+ # No point in adding system paths.
+ continue
+ # Don't remove rpaths specified in LDFLAGS.
+ if libdir in self.get_external_rpath_dirs(target):
+ continue
+ # Windows doesn't support rpaths, but we use this function to
+ # emulate rpaths by setting PATH, so also accept DLLs here
+ if os.path.splitext(libpath)[1] not in ['.dll', '.lib', '.so', '.dylib']:
+ continue
+ if libdir.startswith(self.environment.get_source_dir()):
+ rel_to_src = libdir[len(self.environment.get_source_dir()) + 1:]
+ assert not os.path.isabs(rel_to_src), f'rel_to_src: {rel_to_src} is absolute'
+ paths.append(os.path.join(self.build_to_src, rel_to_src))
+ else:
+ paths.append(libdir)
+ return paths
+
+ def determine_rpath_dirs(self, target: build.BuildTarget) -> T.Tuple[str, ...]:
+ if self.environment.coredata.get_option(OptionKey('layout')) == 'mirror':
+ result: OrderedSet[str] = target.get_link_dep_subdirs()
+ else:
+ result = OrderedSet()
+ result.add('meson-out')
+ result.update(self.rpaths_for_bundled_shared_libraries(target))
+ target.rpath_dirs_to_remove.update([d.encode('utf-8') for d in result])
+ return tuple(result)
+
+ @staticmethod
+ def canonicalize_filename(fname):
+ for ch in ('/', '\\', ':'):
+ fname = fname.replace(ch, '_')
+ return fname
+
+ def object_filename_from_source(self, target, source):
+ assert isinstance(source, mesonlib.File)
+ build_dir = self.environment.get_build_dir()
+ rel_src = source.rel_to_builddir(self.build_to_src)
+
+ # foo.vala files compile down to foo.c and then foo.c.o, not foo.vala.o
+ if rel_src.endswith(('.vala', '.gs')):
+ # See description in generate_vala_compile for this logic.
+ if source.is_built:
+ if os.path.isabs(rel_src):
+ rel_src = rel_src[len(build_dir) + 1:]
+ rel_src = os.path.relpath(rel_src, self.get_target_private_dir(target))
+ else:
+ rel_src = os.path.basename(rel_src)
+ # A meson- prefixed directory is reserved; hopefully no-one creates a file name with such a weird prefix.
+ source = 'meson-generated_' + os.path.splitext(rel_src)[0] + '.c'
+ elif source.is_built:
+ if os.path.isabs(rel_src):
+ rel_src = rel_src[len(build_dir) + 1:]
+ targetdir = self.get_target_private_dir(target)
+ # A meson- prefixed directory is reserved; hopefully no-one creates a file name with such a weird prefix.
+ source = 'meson-generated_' + os.path.relpath(rel_src, targetdir)
+ else:
+ if os.path.isabs(rel_src):
+ # Use the absolute path directly to avoid file name conflicts
+ source = rel_src
+ else:
+ source = os.path.relpath(os.path.join(build_dir, rel_src),
+ os.path.join(self.environment.get_source_dir(), target.get_subdir()))
+ machine = self.environment.machines[target.for_machine]
+ return self.canonicalize_filename(source) + '.' + machine.get_object_suffix()
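+
+ # Worked example (illustrative only): a static source sub/foo.c belonging
+ # to a target defined in sub/ resolves to 'foo.c', which
+ # canonicalize_filename() leaves unchanged, yielding the object name
+ # 'foo.c.o' (or 'foo.c.obj' with MSVC).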
+
+ def determine_ext_objs(self, extobj, proj_dir_to_build_root):
+ result = []
+
+ # Merge sources and generated sources
+ sources = list(extobj.srclist)
+ for gensrc in extobj.genlist:
+ for s in gensrc.get_outputs():
+ path = self.get_target_generated_dir(extobj.target, gensrc, s)
+ dirpart, fnamepart = os.path.split(path)
+ sources.append(File(True, dirpart, fnamepart))
+
+ # Filter out headers and all non-source files
+ filtered_sources = []
+ for s in sources:
+ if self.environment.is_source(s) and not self.environment.is_header(s):
+ filtered_sources.append(s)
+ elif self.environment.is_object(s):
+ result.append(s.relative_name())
+ sources = filtered_sources
+
+ # extobj could contain only objects and no sources
+ if not sources:
+ return result
+
+ targetdir = self.get_target_private_dir(extobj.target)
+
+ # With unity builds, sources don't map directly to objects,
+ # we only support extracting all the objects in this mode,
+ # so just return all object files.
+ if self.is_unity(extobj.target):
+ compsrcs = classify_unity_sources(extobj.target.compilers.values(), sources)
+ sources = []
+ unity_size = self.get_option_for_target(OptionKey('unity_size'), extobj.target)
+
+ for comp, srcs in compsrcs.items():
+ if comp.language in LANGS_CANT_UNITY:
+ sources += srcs
+ continue
+ for i in range(len(srcs) // unity_size + 1):
+ osrc = self.get_unity_source_file(extobj.target,
+ comp.get_default_suffix(), i)
+ sources.append(osrc)
+
+ for osrc in sources:
+ objname = self.object_filename_from_source(extobj.target, osrc)
+ objpath = os.path.join(proj_dir_to_build_root, targetdir, objname)
+ result.append(objpath)
+
+ return result
+
+ def get_pch_include_args(self, compiler, target):
+ args = []
+ pchpath = self.get_target_private_dir(target)
+ includeargs = compiler.get_include_args(pchpath, False)
+ p = target.get_pch(compiler.get_language())
+ if p:
+ args += compiler.get_pch_use_args(pchpath, p[0])
+ return includeargs + args
+
+ def create_msvc_pch_implementation(self, target, lang, pch_header):
+ # We have to include the language in the file name, otherwise
+ # pch.c and pch.cpp will both end up as pch.obj in VS backends.
+ impl_name = f'meson_pch-{lang}.{lang}'
+ pch_rel_to_build = os.path.join(self.get_target_private_dir(target), impl_name)
+ # Make sure to prepend the build dir, since the working directory is
+ # not defined. Otherwise, we might create the file in the wrong path.
+ pch_file = os.path.join(self.build_dir, pch_rel_to_build)
+ os.makedirs(os.path.dirname(pch_file), exist_ok=True)
+
+ content = f'#include "{os.path.basename(pch_header)}"'
+ pch_file_tmp = pch_file + '.tmp'
+ with open(pch_file_tmp, 'w', encoding='utf-8') as f:
+ f.write(content)
+ mesonlib.replace_if_different(pch_file, pch_file_tmp)
+ return pch_rel_to_build
+
+ @staticmethod
+ def escape_extra_args(compiler, args):
+ # all backslashes in defines are doubly-escaped
+ extra_args = []
+ for arg in args:
+ if arg.startswith('-D') or arg.startswith('/D'):
+ arg = arg.replace('\\', '\\\\')
+ extra_args.append(arg)
+
+ return extra_args
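+
+ # Example (illustrative only): the define -DPATH="c:\temp" becomes
+ # -DPATH="c:\\temp", while non-define args pass through untouched.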
+
+ def generate_basic_compiler_args(self, target: build.BuildTarget, compiler: 'Compiler', no_warn_args: bool = False) -> 'CompilerArgs':
+ # Create an empty commands list, and start adding arguments from
+ # various sources in the order in which they must override each other
+ # starting from hard-coded defaults followed by build options and so on.
+ commands = compiler.compiler_args()
+
+ copt_proxy = self.get_compiler_options_for_target(target)
+ # First, the trivial ones that are impossible to override.
+ #
+ # Add -nostdinc/-nostdinc++ if needed; can't be overridden
+ commands += self.get_no_stdlib_args(target, compiler)
+ # Add things like /NOLOGO or -pipe; usually can't be overridden
+ commands += compiler.get_always_args()
+ # Only add warning-flags by default if the buildtype enables it, and if
+ # we weren't explicitly asked to not emit warnings (for Vala, f.ex)
+ if no_warn_args:
+ commands += compiler.get_no_warn_args()
+ else:
+ commands += compiler.get_warn_args(self.get_option_for_target(OptionKey('warning_level'), target))
+ # Add -Werror if werror=true is set in the build options set on the
+ # command-line or default_options inside project(). This only sets the
+ # action to be done for warnings if/when they are emitted, so it's ok
+ # to set it after get_no_warn_args() or get_warn_args().
+ if self.get_option_for_target(OptionKey('werror'), target):
+ commands += compiler.get_werror_args()
+ # Add compile args for c_* or cpp_* build options set on the
+ # command-line or default_options inside project().
+ commands += compiler.get_option_compile_args(copt_proxy)
+ # Add buildtype args: optimization level, debugging, etc.
+ commands += compiler.get_buildtype_args(self.get_option_for_target(OptionKey('buildtype'), target))
+ commands += compiler.get_optimization_args(self.get_option_for_target(OptionKey('optimization'), target))
+ commands += compiler.get_debug_args(self.get_option_for_target(OptionKey('debug'), target))
+ # Add compile args added using add_project_arguments()
+ commands += self.build.get_project_args(compiler, target.subproject, target.for_machine)
+ # Add compile args added using add_global_arguments()
+ # These override per-project arguments
+ commands += self.build.get_global_args(compiler, target.for_machine)
+ # Using both /ZI and /Zi at the same time produces a compiler warning.
+ # We do not add /ZI by default. If it is being used it is because the user has explicitly enabled it.
+ # /ZI needs to be removed in that case to avoid cl's warning to that effect (D9025 : overriding '/ZI' with '/Zi')
+ if ('/ZI' in commands) and ('/Zi' in commands):
+ commands.remove('/Zi')
+ # Compile args added from the env: CFLAGS/CXXFLAGS, etc, or the cross
+ # file. We want these to override all the defaults, but not the
+ # per-target compile args.
+ commands += self.environment.coredata.get_external_args(target.for_machine, compiler.get_language())
+ # Always set -fPIC for shared libraries
+ if isinstance(target, build.SharedLibrary):
+ commands += compiler.get_pic_args()
+ # Set -fPIC for static libraries by default unless explicitly disabled
+ if isinstance(target, build.StaticLibrary) and target.pic:
+ commands += compiler.get_pic_args()
+ elif isinstance(target, (build.StaticLibrary, build.Executable)) and target.pie:
+ commands += compiler.get_pie_args()
+ # Add compile args needed to find external dependencies. Link args are
+ # added while generating the link command.
+ # NOTE: We must preserve the order in which external deps are
+ # specified, so we reverse the list before iterating over it.
+ for dep in reversed(target.get_external_deps()):
+ if not dep.found():
+ continue
+
+ if compiler.language == 'vala':
+ if isinstance(dep, dependencies.PkgConfigDependency):
+ if dep.name == 'glib-2.0' and dep.version_reqs is not None:
+ for req in dep.version_reqs:
+ if req.startswith(('>=', '==')):
+ commands += ['--target-glib', req[2:]]
+ break
+ commands += ['--pkg', dep.name]
+ elif isinstance(dep, dependencies.ExternalLibrary):
+ commands += dep.get_link_args('vala')
+ else:
+ commands += compiler.get_dependency_compile_args(dep)
+ # Qt needs -fPIC for executables
+ # XXX: We should move to -fPIC for all executables
+ if isinstance(target, build.Executable):
+ commands += dep.get_exe_args(compiler)
+ # For 'automagic' deps: Boost and GTest. Also dependency('threads').
+ # pkg-config puts the thread flags itself via `Cflags:`
+ # Fortran requires extra include directives.
+ if compiler.language == 'fortran':
+ for lt in chain(target.link_targets, target.link_whole_targets):
+ priv_dir = self.get_target_private_dir(lt)
+ commands += compiler.get_include_args(priv_dir, False)
+ return commands
+
+ def build_target_link_arguments(self, compiler, deps):
+ args = []
+ for d in deps:
+ if not d.is_linkable_target():
+ raise RuntimeError(f'Tried to link with a non-library target "{d.get_basename()}".')
+ arg = self.get_target_filename_for_linking(d)
+ if not arg:
+ continue
+ if compiler.get_language() == 'd':
+ arg = '-Wl,' + arg
+ else:
+ arg = compiler.get_linker_lib_prefix() + arg
+ args.append(arg)
+ return args
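+
+ # Illustrative example (hypothetical values, not upstream code): for a
+ # dependency whose filename for linking is 'libfoo.a', most compilers
+ # return the name unchanged, while for D the result is '-Wl,libfoo.a'
+ # so the flag is forwarded to the underlying C linker.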
+
+ def get_mingw_extra_paths(self, target):
+ paths = OrderedSet()
+ # The cross bindir
+ root = self.environment.properties[target.for_machine].get_root()
+ if root:
+ paths.add(os.path.join(root, 'bin'))
+ # The toolchain bindir
+ sys_root = self.environment.properties[target.for_machine].get_sys_root()
+ if sys_root:
+ paths.add(os.path.join(sys_root, 'bin'))
+ # Get program and library dirs from all target compilers
+ if isinstance(target, build.BuildTarget):
+ for cc in target.compilers.values():
+ paths.update(cc.get_program_dirs(self.environment))
+ paths.update(cc.get_library_dirs(self.environment))
+ return list(paths)
+
+ def determine_windows_extra_paths(self, target: T.Union[build.BuildTarget, str], extra_bdeps):
+ '''On Windows there is no such thing as an rpath.
+ We must determine all locations of DLLs that this exe
+ links to and return them so they can be used in unit
+ tests.'''
+ result = set()
+ prospectives = set()
+ if isinstance(target, build.BuildTarget):
+ prospectives.update(target.get_transitive_link_deps())
+ # External deps
+ for deppath in self.rpaths_for_bundled_shared_libraries(target, exclude_system=False):
+ result.add(os.path.normpath(os.path.join(self.environment.get_build_dir(), deppath)))
+ for bdep in extra_bdeps:
+ prospectives.add(bdep)
+ prospectives.update(bdep.get_transitive_link_deps())
+ # Internal deps
+ for ld in prospectives:
+ if ld == '' or ld == '.':
+ continue
+ dirseg = os.path.join(self.environment.get_build_dir(), self.get_target_dir(ld))
+ result.add(dirseg)
+ if (isinstance(target, build.BuildTarget) and
+ not self.environment.machines.matches_build_machine(target.for_machine)):
+ result.update(self.get_mingw_extra_paths(target))
+ return list(result)
+
+ def write_benchmark_file(self, datafile):
+ self.write_test_serialisation(self.build.get_benchmarks(), datafile)
+
+ def write_test_file(self, datafile):
+ self.write_test_serialisation(self.build.get_tests(), datafile)
+
+ def create_test_serialisation(self, tests: T.List['Test']) -> T.List[TestSerialisation]:
+ arr = []
+ for t in sorted(tests, key=lambda tst: -1 * tst.priority):
+ exe = t.get_exe()
+ if isinstance(exe, programs.ExternalProgram):
+ cmd = exe.get_command()
+ else:
+ cmd = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(t.get_exe()))]
+ if isinstance(exe, (build.BuildTarget, programs.ExternalProgram)):
+ test_for_machine = exe.for_machine
+ else:
+ # E.g. an external verifier or simulator program run on a generated executable.
+ # Can always be run without a wrapper.
+ test_for_machine = MachineChoice.BUILD
+
+ # We allow passing compiled executables to tests, and these may be
+ # cross built, so they must also be considered when deciding whether
+ # the test is cross or not.
+ for a in t.cmd_args:
+ if isinstance(a, build.BuildTarget):
+ if a.for_machine is MachineChoice.HOST:
+ test_for_machine = MachineChoice.HOST
+ break
+
+ is_cross = self.environment.is_cross_build(test_for_machine)
+ if is_cross and self.environment.need_exe_wrapper():
+ exe_wrapper = self.environment.get_exe_wrapper()
+ else:
+ exe_wrapper = None
+ machine = self.environment.machines[exe.for_machine]
+ if machine.is_windows() or machine.is_cygwin():
+ extra_bdeps = []
+ if isinstance(exe, build.CustomTarget):
+ extra_bdeps = exe.get_transitive_build_target_deps()
+ extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps)
+ else:
+ extra_paths = []
+
+ cmd_args = []
+ depends = set(t.depends)
+ if isinstance(exe, build.Target):
+ depends.add(exe)
+ for a in t.cmd_args:
+ if isinstance(a, build.Target):
+ depends.add(a)
+ if isinstance(a, build.BuildTarget):
+ extra_paths += self.determine_windows_extra_paths(a, [])
+ if isinstance(a, mesonlib.File):
+ a = os.path.join(self.environment.get_build_dir(), a.rel_to_builddir(self.build_to_src))
+ cmd_args.append(a)
+ elif isinstance(a, str):
+ cmd_args.append(a)
+ elif isinstance(a, build.Executable):
+ p = self.construct_target_rel_path(a, t.workdir)
+ if p == a.get_filename():
+ p = './' + p
+ cmd_args.append(p)
+ elif isinstance(a, build.Target):
+ cmd_args.append(self.construct_target_rel_path(a, t.workdir))
+ else:
+ raise MesonException('Bad object in test command.')
+ ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross,
+ exe_wrapper, self.environment.need_exe_wrapper(),
+ t.is_parallel, cmd_args, t.env,
+ t.should_fail, t.timeout, t.workdir,
+ extra_paths, t.protocol, t.priority,
+ isinstance(exe, build.Executable),
+ [x.get_id() for x in depends],
+ self.environment.coredata.version)
+ arr.append(ts)
+ return arr
+
+ def write_test_serialisation(self, tests: T.List['Test'], datafile: str):
+ pickle.dump(self.create_test_serialisation(tests), datafile)
+
+ def construct_target_rel_path(self, a, workdir):
+ if workdir is None:
+ return self.get_target_filename(a)
+ assert os.path.isabs(workdir)
+ abs_path = self.get_target_filename_abs(a)
+ return os.path.relpath(abs_path, workdir)
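+
+ # Illustrative example (hypothetical paths, not upstream code): for a
+ # target built at <builddir>/sub/prog and a workdir of <builddir>/tests,
+ # this returns '../sub/prog'; with no workdir it returns the plain
+ # target filename 'sub/prog'.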
+
+ def generate_depmf_install(self, d: InstallData) -> None:
+ if self.build.dep_manifest_name is None:
+ return
+ ifilename = os.path.join(self.environment.get_build_dir(), 'depmf.json')
+ ofilename = os.path.join(self.environment.get_prefix(), self.build.dep_manifest_name)
+ mfobj = {'type': 'dependency manifest', 'version': '1.0', 'projects': self.build.dep_manifest}
+ with open(ifilename, 'w', encoding='utf-8') as f:
+ f.write(json.dumps(mfobj))
+ # Copy file from, to, and with mode unchanged
+ d.data.append(InstallDataBase(ifilename, ofilename, None, ''))
+
+ def get_regen_filelist(self):
+ '''List of all files whose alteration means that the build
+ definition needs to be regenerated.'''
+ deps = [str(Path(self.build_to_src) / df)
+ for df in self.interpreter.get_build_def_files()]
+ if self.environment.is_cross_build():
+ deps.extend(self.environment.coredata.cross_files)
+ deps.extend(self.environment.coredata.config_files)
+ deps.append('meson-private/coredata.dat')
+ self.check_clock_skew(deps)
+ return deps
+
+ def generate_regen_info(self):
+ deps = self.get_regen_filelist()
+ regeninfo = RegenInfo(self.environment.get_source_dir(),
+ self.environment.get_build_dir(),
+ deps)
+ filename = os.path.join(self.environment.get_scratch_dir(),
+ 'regeninfo.dump')
+ with open(filename, 'wb') as f:
+ pickle.dump(regeninfo, f)
+
+ def check_clock_skew(self, file_list):
+ # If a file that leads to reconfiguration has a time
+ # stamp in the future, it will trigger an eternal reconfigure
+ # loop.
+ import time
+ now = time.time()
+ for f in file_list:
+ absf = os.path.join(self.environment.get_build_dir(), f)
+ ftime = os.path.getmtime(absf)
+ delta = ftime - now
+ # On Windows disk time stamps sometimes point
+ # to the future by a minuscule amount, less than
+ # 0.001 seconds. I don't know why.
+ if delta > 0.001:
+ raise MesonException(f'Clock skew detected. File {absf} has a time stamp {delta:.4f}s in the future.')
+
+ def build_target_to_cmd_array(self, bt):
+ if isinstance(bt, build.BuildTarget):
+ arr = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(bt))]
+ else:
+ arr = bt.get_command()
+ return arr
+
+ def replace_extra_args(self, args, genlist):
+ final_args = []
+ for a in args:
+ if a == '@EXTRA_ARGS@':
+ final_args += genlist.get_extra_args()
+ else:
+ final_args.append(a)
+ return final_args
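+
+ # Illustrative example (hypothetical values, not upstream code): given
+ # args ['--opt', '@EXTRA_ARGS@'] and a generator list whose extra args
+ # are ['-a', '-b'], the result is ['--opt', '-a', '-b'].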
+
+ def replace_outputs(self, args, private_dir, output_list):
+ newargs = []
+ regex = re.compile(r'@OUTPUT(\d+)@')
+ for arg in args:
+ m = regex.search(arg)
+ while m is not None:
+ index = int(m.group(1))
+ src = f'@OUTPUT{index}@'
+ arg = arg.replace(src, os.path.join(private_dir, output_list[index]))
+ m = regex.search(arg)
+ newargs.append(arg)
+ return newargs
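+
+ # Illustrative example (hypothetical values, not upstream code):
+ # replace_outputs(['@OUTPUT0@.c', '--out=@OUTPUT1@'], 'priv', ['foo', 'bar'])
+ # returns ['priv/foo.c', '--out=priv/bar'] on POSIX.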
+
+ def get_build_by_default_targets(self):
+ result = OrderedDict()
+ # Get all build and custom targets that must be built by default
+ for name, t in self.build.get_targets().items():
+ if t.build_by_default:
+ result[name] = t
+ # Get all targets used as test executables and arguments. These must
+ # also be built by default. XXX: Sometime in the future these should be
+ # built only before running tests.
+ for t in self.build.get_tests():
+ exe = t.exe
+ if isinstance(exe, (build.CustomTarget, build.BuildTarget)):
+ result[exe.get_id()] = exe
+ for arg in t.cmd_args:
+ if not isinstance(arg, (build.CustomTarget, build.BuildTarget)):
+ continue
+ result[arg.get_id()] = arg
+ for dep in t.depends:
+ assert isinstance(dep, (build.CustomTarget, build.BuildTarget))
+ result[dep.get_id()] = dep
+ return result
+
+ @lru_cache(maxsize=None)
+ def get_custom_target_provided_by_generated_source(self, generated_source: build.CustomTarget) -> 'ImmutableListProtocol[str]':
+ libs: T.List[str] = []
+ for f in generated_source.get_outputs():
+ if self.environment.is_library(f):
+ libs.append(os.path.join(self.get_target_dir(generated_source), f))
+ return libs
+
+ @lru_cache(maxsize=None)
+ def get_custom_target_provided_libraries(self, target: T.Union[build.BuildTarget, build.CustomTarget]) -> 'ImmutableListProtocol[str]':
+ libs: T.List[str] = []
+ for t in target.get_generated_sources():
+ if not isinstance(t, build.CustomTarget):
+ continue
+ libs.extend(self.get_custom_target_provided_by_generated_source(t))
+ return libs
+
+ def is_unity(self, target):
+ optval = self.get_option_for_target(OptionKey('unity'), target)
+ return optval == 'on' or (optval == 'subprojects' and target.subproject != '')
+
+ def get_custom_target_sources(self, target):
+ '''
+ Custom target sources can be of various object types; strings, File,
+ BuildTarget, even other CustomTargets.
+ Returns the path to them relative to the build root directory.
+ '''
+ srcs = []
+ for i in target.get_sources():
+ if isinstance(i, str):
+ fname = [os.path.join(self.build_to_src, target.subdir, i)]
+ elif isinstance(i, build.BuildTarget):
+ fname = [self.get_target_filename(i)]
+ elif isinstance(i, (build.CustomTarget, build.CustomTargetIndex)):
+ fname = [os.path.join(self.get_custom_target_output_dir(i), p) for p in i.get_outputs()]
+ elif isinstance(i, build.GeneratedList):
+ fname = [os.path.join(self.get_target_private_dir(target), p) for p in i.get_outputs()]
+ elif isinstance(i, build.ExtractedObjects):
+ fname = [os.path.join(self.get_target_private_dir(i.target), p) for p in i.get_outputs(self)]
+ else:
+ fname = [i.rel_to_builddir(self.build_to_src)]
+ if target.absolute_paths:
+ fname = [os.path.join(self.environment.get_build_dir(), f) for f in fname]
+ srcs += fname
+ return srcs
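+
+ # Illustrative example (hypothetical target, not upstream code): for a
+ # custom target in 'sub/' whose sources are a plain string 'foo.c' and a
+ # BuildTarget, this returns something like
+ # ['../sub/foo.c', 'sub/libsome.so'] relative to the build root,
+ # depending on the source types handled above.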
+
+ def get_custom_target_depend_files(self, target, absolute_paths=False):
+ deps = []
+ for i in target.depend_files:
+ if isinstance(i, mesonlib.File):
+ if absolute_paths:
+ deps.append(i.absolute_path(self.environment.get_source_dir(),
+ self.environment.get_build_dir()))
+ else:
+ deps.append(i.rel_to_builddir(self.build_to_src))
+ else:
+ if absolute_paths:
+ deps.append(os.path.join(self.environment.get_source_dir(), target.subdir, i))
+ else:
+ deps.append(os.path.join(self.build_to_src, target.subdir, i))
+ return deps
+
+ def get_custom_target_output_dir(self, target):
+ # The XCode backend is special. A target foo/bar does
+ # not go to ${BUILDDIR}/foo/bar but instead to
+ # ${BUILDDIR}/${BUILDTYPE}/foo/bar.
+ # Currently we set the include dir to be the former,
+ # and not the latter. Thus we need this extra customisation
+ # point. If in the future we make include dirs et al match
+ # ${BUILDDIR}/${BUILDTYPE} instead, this becomes unnecessary.
+ return self.get_target_dir(target)
+
+ @lru_cache(maxsize=None)
+ def get_normpath_target(self, source) -> str:
+ return os.path.normpath(source)
+
+ def get_custom_target_dirs(self, target, compiler, *, absolute_path=False):
+ custom_target_include_dirs = []
+ for i in target.get_generated_sources():
+ # Generator output goes into the target private dir which is
+ # already in the include paths list. Only custom targets have their
+ # own target build dir.
+ if not isinstance(i, (build.CustomTarget, build.CustomTargetIndex)):
+ continue
+ idir = self.get_normpath_target(self.get_custom_target_output_dir(i))
+ if not idir:
+ idir = '.'
+ if absolute_path:
+ idir = os.path.join(self.environment.get_build_dir(), idir)
+ if idir not in custom_target_include_dirs:
+ custom_target_include_dirs.append(idir)
+ return custom_target_include_dirs
+
+ def get_custom_target_dir_include_args(self, target, compiler, *, absolute_path=False):
+ incs = []
+ for i in self.get_custom_target_dirs(target, compiler, absolute_path=absolute_path):
+ incs += compiler.get_include_args(i, False)
+ return incs
+
+
+ def eval_custom_target_command(self, target, absolute_outputs=False):
+ # We want the outputs to be absolute only when using the VS backend
+ # XXX: Maybe allow the vs backend to use relative paths too?
+ source_root = self.build_to_src
+ build_root = '.'
+ outdir = self.get_custom_target_output_dir(target)
+ if absolute_outputs:
+ source_root = self.environment.get_source_dir()
+ build_root = self.environment.get_build_dir()
+ outdir = os.path.join(self.environment.get_build_dir(), outdir)
+ outputs = []
+ for i in target.get_outputs():
+ outputs.append(os.path.join(outdir, i))
+ inputs = self.get_custom_target_sources(target)
+ # Evaluate the command list
+ cmd = []
+ for i in target.command:
+ if isinstance(i, build.BuildTarget):
+ cmd += self.build_target_to_cmd_array(i)
+ continue
+ elif isinstance(i, build.CustomTarget):
+ # GIR scanner will attempt to execute this binary but
+ # it assumes that it is in path, so always give it a full path.
+ tmp = i.get_outputs()[0]
+ i = os.path.join(self.get_custom_target_output_dir(i), tmp)
+ elif isinstance(i, mesonlib.File):
+ i = i.rel_to_builddir(self.build_to_src)
+ if target.absolute_paths or absolute_outputs:
+ i = os.path.join(self.environment.get_build_dir(), i)
+ # FIXME: str types are blindly added ignoring 'target.absolute_paths'
+ # because we can't know if they refer to a file or just a string
+ elif isinstance(i, str):
+ if '@SOURCE_ROOT@' in i:
+ i = i.replace('@SOURCE_ROOT@', source_root)
+ if '@BUILD_ROOT@' in i:
+ i = i.replace('@BUILD_ROOT@', build_root)
+ if '@CURRENT_SOURCE_DIR@' in i:
+ i = i.replace('@CURRENT_SOURCE_DIR@', os.path.join(source_root, target.subdir))
+ if '@DEPFILE@' in i:
+ if target.depfile is None:
+ msg = f'Custom target {target.name!r} has @DEPFILE@ but no depfile ' \
+ 'keyword argument.'
+ raise MesonException(msg)
+ dfilename = os.path.join(outdir, target.depfile)
+ i = i.replace('@DEPFILE@', dfilename)
+ if '@PRIVATE_DIR@' in i:
+ if target.absolute_paths:
+ pdir = self.get_target_private_dir_abs(target)
+ else:
+ pdir = self.get_target_private_dir(target)
+ i = i.replace('@PRIVATE_DIR@', pdir)
+ else:
+ raise RuntimeError(f'Argument {i} is of unknown type {type(i)}')
+ cmd.append(i)
+ # Substitute the rest of the template strings
+ values = mesonlib.get_filenames_templates_dict(inputs, outputs)
+ cmd = mesonlib.substitute_values(cmd, values)
+ # This should not be necessary but removing it breaks
+ # building GStreamer on Windows. The underlying issue
+ # is problems with quoting backslashes on Windows
+ # which is the seventh circle of hell. The downside is
+ # that this breaks custom targets whose command lines
+ # have backslashes. If you try to fix this be sure to
+ # check that it does not break GST.
+ #
+ # The bug causes file paths such as c:\foo to get escaped
+ # into c:\\foo.
+ #
+ # Unfortunately we have not been able to come up with an
+ # isolated test case for this so unless you manage to come up
+ # with one, the only way is to test the building with Gst's
+ # setup. Note this in your MR or ping us and we will get it
+ # fixed.
+ #
+ # https://github.com/mesonbuild/meson/pull/737
+ cmd = [i.replace('\\', '/') for i in cmd]
+ return inputs, outputs, cmd
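+
+ # Illustrative expansion (hypothetical target, not upstream code): a
+ # command ['gen.py', '@SOURCE_ROOT@/in.txt', '@OUTPUT0@'] for a target
+ # in 'sub' with one output 'out.txt' evaluates to roughly
+ # ['gen.py', '../in.txt', 'sub/out.txt'] when the backend uses relative
+ # paths (source_root '..', build_root '.').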
+
+ def get_run_target_env(self, target: build.RunTarget) -> build.EnvironmentVariables:
+ env = target.env if target.env else build.EnvironmentVariables()
+ introspect_cmd = join_args(self.environment.get_build_command() + ['introspect'])
+ env.set('MESON_SOURCE_ROOT', [self.environment.get_source_dir()])
+ env.set('MESON_BUILD_ROOT', [self.environment.get_build_dir()])
+ env.set('MESON_SUBDIR', [target.subdir])
+ env.set('MESONINTROSPECT', [introspect_cmd])
+ return env
+
+ def run_postconf_scripts(self) -> None:
+ from ..scripts.meson_exe import run_exe
+ introspect_cmd = join_args(self.environment.get_build_command() + ['introspect'])
+ env = {'MESON_SOURCE_ROOT': self.environment.get_source_dir(),
+ 'MESON_BUILD_ROOT': self.environment.get_build_dir(),
+ 'MESONINTROSPECT': introspect_cmd,
+ }
+
+ for s in self.build.postconf_scripts:
+ name = ' '.join(s.cmd_args)
+ mlog.log(f'Running postconf script {name!r}')
+ run_exe(s, env)
+
+ def create_install_data(self) -> InstallData:
+ strip_bin = self.environment.lookup_binary_entry(MachineChoice.HOST, 'strip')
+ if strip_bin is None:
+ if self.environment.is_cross_build():
+ mlog.warning('Cross file does not specify strip binary, result will not be stripped.')
+ else:
+ # TODO go through all candidates, like others
+ strip_bin = [detect.defaults['strip'][0]]
+ d = InstallData(self.environment.get_source_dir(),
+ self.environment.get_build_dir(),
+ self.environment.get_prefix(),
+ strip_bin,
+ self.environment.coredata.get_option(OptionKey('install_umask')),
+ self.environment.get_build_command() + ['introspect'],
+ self.environment.coredata.version)
+ self.generate_depmf_install(d)
+ self.generate_target_install(d)
+ self.generate_header_install(d)
+ self.generate_man_install(d)
+ self.generate_data_install(d)
+ self.generate_custom_install_script(d)
+ self.generate_subdir_install(d)
+ return d
+
+ def create_install_data_files(self):
+ install_data_file = os.path.join(self.environment.get_scratch_dir(), 'install.dat')
+ with open(install_data_file, 'wb') as ofile:
+ pickle.dump(self.create_install_data(), ofile)
+
+ def generate_target_install(self, d: InstallData) -> None:
+ for t in self.build.get_targets().values():
+ if not t.should_install():
+ continue
+ outdirs, custom_install_dir = t.get_install_dir(self.environment)
+ # Sanity-check the outputs and install_dirs
+ num_outdirs, num_out = len(outdirs), len(t.get_outputs())
+ if num_outdirs != 1 and num_outdirs != num_out:
+ m = 'Target {!r} has {} outputs: {!r}, but only {} "install_dir"s were found.\n' \
+ "Pass 'false' for outputs that should not be installed and 'true' for\n" \
+ 'using the default installation directory for an output.'
+ raise MesonException(m.format(t.name, num_out, t.get_outputs(), num_outdirs))
+ install_mode = t.get_custom_install_mode()
+ # Install the target output(s)
+ if isinstance(t, build.BuildTarget):
+ # In general, stripping static archives is tricky and full of pitfalls.
+ # Wholesale stripping of static archives with a command such as
+ #
+ # strip libfoo.a
+ #
+ # is broken, as GNU's strip will remove *every* symbol in a static
+ # archive. One solution to this nonintuitive behaviour would be
+ # to only strip local/debug symbols. Unfortunately, strip arguments
+ # are not specified by POSIX and therefore not portable. GNU's `-g`
+ # option (i.e. remove debug symbols) is equivalent to Apple's `-S`.
+ #
+ # TODO: Create GNUStrip/AppleStrip/etc. hierarchy for more
+ # fine-grained stripping of static archives.
+ should_strip = not isinstance(t, build.StaticLibrary) and self.get_option_for_target(OptionKey('strip'), t)
+ assert isinstance(should_strip, bool), 'for mypy'
+ # Install primary build output (library/executable/jar, etc)
+ # Done separately because of strip/aliases/rpath
+ if outdirs[0] is not False:
+ mappings = t.get_link_deps_mapping(d.prefix, self.environment)
+ i = TargetInstallData(self.get_target_filename(t), outdirs[0],
+ t.get_aliases(), should_strip, mappings,
+ t.rpath_dirs_to_remove,
+ t.install_rpath, install_mode, t.subproject)
+ d.targets.append(i)
+
+ if isinstance(t, (build.SharedLibrary, build.SharedModule, build.Executable)):
+ # On toolchains/platforms that use an import library for
+ # linking (separate from the shared library with all the
+ # code), we need to install that too (dll.a/.lib).
+ if t.get_import_filename():
+ if custom_install_dir:
+ # If the DLL is installed into a custom directory,
+ # install the import library into the same place so
+ # it doesn't go into a surprising place
+ implib_install_dir = outdirs[0]
+ else:
+ implib_install_dir = self.environment.get_import_lib_dir()
+ # Install the import library; may not exist for shared modules
+ i = TargetInstallData(self.get_target_filename_for_linking(t),
+ implib_install_dir, {}, False, {}, set(), '', install_mode,
+ t.subproject, optional=isinstance(t, build.SharedModule))
+ d.targets.append(i)
+
+ if not should_strip and t.get_debug_filename():
+ debug_file = os.path.join(self.get_target_dir(t), t.get_debug_filename())
+ i = TargetInstallData(debug_file, outdirs[0],
+ {}, False, {}, set(), '',
+ install_mode, t.subproject,
+ optional=True)
+ d.targets.append(i)
+ # Install secondary outputs. Only used for Vala right now.
+ if num_outdirs > 1:
+ for output, outdir in zip(t.get_outputs()[1:], outdirs[1:]):
+ # User requested that we not install this output
+ if outdir is False:
+ continue
+ f = os.path.join(self.get_target_dir(t), output)
+ i = TargetInstallData(f, outdir, {}, False, {}, set(), None,
+ install_mode, t.subproject)
+ d.targets.append(i)
+ elif isinstance(t, build.CustomTarget):
+ # If only one install_dir is specified, assume that all
+ # outputs will be installed into it. This is for
+ # backwards-compatibility and because it makes sense to
+ # avoid repetition since this is a common use-case.
+ #
+ # To selectively install only some outputs, pass `false` as
+ # the install_dir for the corresponding output by index
+ if num_outdirs == 1 and num_out > 1:
+ for output in t.get_outputs():
+ f = os.path.join(self.get_target_dir(t), output)
+ i = TargetInstallData(f, outdirs[0], {}, False, {}, set(), None, install_mode,
+ t.subproject, optional=not t.build_by_default)
+ d.targets.append(i)
+ else:
+ for output, outdir in zip(t.get_outputs(), outdirs):
+ # User requested that we not install this output
+ if outdir is False:
+ continue
+ f = os.path.join(self.get_target_dir(t), output)
+ i = TargetInstallData(f, outdir, {}, False, {}, set(), None, install_mode,
+ t.subproject, optional=not t.build_by_default)
+ d.targets.append(i)
+
+ def generate_custom_install_script(self, d: InstallData) -> None:
+ d.install_scripts = self.build.install_scripts
+
+ def generate_header_install(self, d: InstallData) -> None:
+ incroot = self.environment.get_includedir()
+ headers = self.build.get_headers()
+
+ srcdir = self.environment.get_source_dir()
+ builddir = self.environment.get_build_dir()
+ for h in headers:
+ outdir = h.get_custom_install_dir()
+ if outdir is None:
+ subdir = h.get_install_subdir()
+ if subdir is None:
+ outdir = incroot
+ else:
+ outdir = os.path.join(incroot, subdir)
+
+ for f in h.get_sources():
+ if not isinstance(f, File):
+ raise MesonException(f'Invalid header type {f!r} can\'t be installed')
+ abspath = f.absolute_path(srcdir, builddir)
+ i = InstallDataBase(abspath, outdir, h.get_custom_install_mode(), h.subproject)
+ d.headers.append(i)
+
+ def generate_man_install(self, d: InstallData) -> None:
+ manroot = self.environment.get_mandir()
+ man = self.build.get_man()
+ for m in man:
+ for f in m.get_sources():
+ num = f.split('.')[-1]
+ subdir = m.get_custom_install_dir()
+ if subdir is None:
+ if m.locale:
+ subdir = os.path.join(manroot, m.locale, 'man' + num)
+ else:
+ subdir = os.path.join(manroot, 'man' + num)
+ fname = f.fname
+ if m.locale: # strip locale from file name
+ fname = fname.replace(f'.{m.locale}', '')
+ srcabs = f.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir())
+ dstabs = os.path.join(subdir, os.path.basename(fname))
+ i = InstallDataBase(srcabs, dstabs, m.get_custom_install_mode(), m.subproject)
+ d.man.append(i)
+
+ def generate_data_install(self, d: InstallData):
+ data = self.build.get_data()
+ srcdir = self.environment.get_source_dir()
+ builddir = self.environment.get_build_dir()
+ for de in data:
+ assert isinstance(de, build.Data)
+ subdir = de.install_dir
+ if not subdir:
+ subdir = os.path.join(self.environment.get_datadir(), self.interpreter.build.project_name)
+ for src_file, dst_name in zip(de.sources, de.rename):
+ assert isinstance(src_file, mesonlib.File)
+ dst_abs = os.path.join(subdir, dst_name)
+ i = InstallDataBase(src_file.absolute_path(srcdir, builddir), dst_abs, de.install_mode, de.subproject)
+ d.data.append(i)
+
+ def generate_subdir_install(self, d: InstallData) -> None:
+ for sd in self.build.get_install_subdirs():
+ if sd.from_source_dir:
+ from_dir = self.environment.get_source_dir()
+ else:
+ from_dir = self.environment.get_build_dir()
+ src_dir = os.path.join(from_dir,
+ sd.source_subdir,
+ sd.installable_subdir).rstrip('/')
+ dst_dir = os.path.join(self.environment.get_prefix(),
+ sd.install_dir)
+ if not sd.strip_directory:
+ dst_dir = os.path.join(dst_dir, os.path.basename(src_dir))
+ i = SubdirInstallData(src_dir, dst_dir, sd.install_mode, sd.exclude, sd.subproject)
+ d.install_subdirs.append(i)
+
+ def get_introspection_data(self, target_id: str, target: build.Target) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
+ '''
+ Returns a list of source dicts with the following format for a given target:
+ [
+ {
+ "language": "<LANG>",
+ "compiler": ["result", "of", "comp.get_exelist()"],
+ "parameters": ["list", "of", "compiler", "parameters],
+ "sources": ["list", "of", "all", "<LANG>", "source", "files"],
+ "generated_sources": ["list", "of", "generated", "source", "files"]
+ }
+ ]
+
+ This is a limited fallback / reference implementation. The backend should override this method.
+ '''
+ if isinstance(target, (build.CustomTarget, build.BuildTarget)):
+ source_list_raw = target.sources
+ source_list = []
+ for j in source_list_raw:
+ if isinstance(j, mesonlib.File):
+ source_list += [j.absolute_path(self.source_dir, self.build_dir)]
+ elif isinstance(j, str):
+ source_list += [os.path.join(self.source_dir, j)]
+ elif isinstance(j, (build.CustomTarget, build.BuildTarget)):
+ source_list += [os.path.join(self.build_dir, j.get_subdir(), o) for o in j.get_outputs()]
+ source_list = [os.path.normpath(x) for x in source_list]
+
+ compiler = []
+ if isinstance(target, build.CustomTarget):
+ tmp_compiler = target.command
+ if not isinstance(tmp_compiler, list):
+ tmp_compiler = [tmp_compiler]
+ for j in tmp_compiler:
+ if isinstance(j, mesonlib.File):
+ compiler += [j.absolute_path(self.source_dir, self.build_dir)]
+ elif isinstance(j, str):
+ compiler += [j]
+ elif isinstance(j, (build.BuildTarget, build.CustomTarget)):
+ compiler += j.get_outputs()
+ else:
+ raise RuntimeError(f'Type "{type(j).__name__}" is not supported in get_introspection_data. This is a bug')
+
+ return [{
+ 'language': 'unknown',
+ 'compiler': compiler,
+ 'parameters': [],
+ 'sources': source_list,
+ 'generated_sources': []
+ }]
+
+ return []
+
+ def get_devenv(self) -> build.EnvironmentVariables:
+ env = build.EnvironmentVariables()
+ extra_paths = set()
+ library_paths = set()
+ for t in self.build.get_targets().values():
+ cross_built = not self.environment.machines.matches_build_machine(t.for_machine)
+ can_run = not cross_built or not self.environment.need_exe_wrapper()
+ in_default_dir = t.should_install() and not t.get_install_dir(self.environment)[1]
+ if not can_run or not in_default_dir:
+ continue
+ tdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(t))
+ if isinstance(t, build.Executable):
+ # Add binaries that are going to be installed in bindir into PATH
+ # so they get used by default instead of searching on system when
+ # in developer environment.
+ extra_paths.add(tdir)
+ if mesonlib.is_windows() or mesonlib.is_cygwin():
+ # On Windows we cannot rely on rpath to run executables from the build
+ # directory. We have to add the location of every needed DLL to PATH.
+ extra_paths.update(self.determine_windows_extra_paths(t, []))
+ elif isinstance(t, build.SharedLibrary):
+ # Add libraries that are going to be installed in libdir into
+ # LD_LIBRARY_PATH. This allows running system applications using
+ # that library.
+ library_paths.add(tdir)
+ if mesonlib.is_windows() or mesonlib.is_cygwin():
+ extra_paths.update(library_paths)
+ elif mesonlib.is_osx():
+ env.prepend('DYLD_LIBRARY_PATH', list(library_paths))
+ else:
+ env.prepend('LD_LIBRARY_PATH', list(library_paths))
+ env.prepend('PATH', list(extra_paths))
+ return env
diff --git a/meson/mesonbuild/backend/ninjabackend.py b/meson/mesonbuild/backend/ninjabackend.py
new file mode 100644
index 000000000..ca8379161
--- /dev/null
+++ b/meson/mesonbuild/backend/ninjabackend.py
@@ -0,0 +1,3352 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import typing as T
+import os
+import re
+import pickle
+import shlex
+import subprocess
+from collections import OrderedDict
+from enum import Enum, unique
+import itertools
+from textwrap import dedent
+from pathlib import PurePath, Path
+from functools import lru_cache
+
+from . import backends
+from .. import modules
+from .. import environment, mesonlib
+from .. import build
+from .. import mlog
+from .. import compilers
+from ..arglist import CompilerArgs
+from ..compilers import (
+ Compiler, CCompiler,
+ FortranCompiler,
+ mixins,
+ PGICCompiler,
+ VisualStudioLikeCompiler,
+)
+from ..linkers import ArLinker, RSPFileSyntax
+from ..mesonlib import (
+ File, LibType, MachineChoice, MesonException, OrderedSet, PerMachine,
+ ProgressBar, quote_arg
+)
+from ..mesonlib import get_compiler_for_source, has_path_sep, OptionKey
+from .backends import CleanTrees
+from ..build import GeneratedList, InvalidArguments, ExtractedObjects
+from ..interpreter import Interpreter
+from ..mesonmain import need_setup_vsenv
+
+if T.TYPE_CHECKING:
+ from .._typing import ImmutableListProtocol
+ from ..linkers import StaticLinker
+ from ..compilers.cs import CsCompiler
+
+
+FORTRAN_INCLUDE_PAT = r"^\s*#?include\s*['\"](\w+\.\w+)['\"]"
+FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$"
+FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)"
+FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)"
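+
+# Illustrative matches for the patterns above (not upstream code):
+# "include 'constants.f90'" -> FORTRAN_INCLUDE_PAT captures 'constants.f90'
+# "module foo" -> FORTRAN_MODULE_PAT captures 'foo'
+# "submodule (parent:child) impl" -> FORTRAN_SUBMOD_PAT captures ('parent:child', 'impl')
+# "use mymod" -> FORTRAN_USE_PAT captures 'mymod'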
+
+def cmd_quote(s):
+ # see: https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-commandlinetoargvw#remarks
+
+ # backslash escape any existing double quotes
+ # any existing backslashes preceding a quote are doubled
+ s = re.sub(r'(\\*)"', lambda m: '\\' * (len(m.group(1)) * 2 + 1) + '"', s)
+ # any terminal backslashes likewise need doubling
+ s = re.sub(r'(\\*)$', lambda m: '\\' * (len(m.group(1)) * 2), s)
+ # and double quote
+ s = f'"{s}"'
+
+ return s
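+
+# Illustrative doctest-style example (not upstream code):
+# >>> cmd_quote(r'a\b "c"')
+# '"a\\b \\"c\\""'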
+
+def gcc_rsp_quote(s):
+ # see: the function buildargv() in libiberty
+ #
+ # this differs from sh-quoting in that a backslash *always* escapes the
+ # following character, even inside single quotes.
+
+ s = s.replace('\\', '\\\\')
+
+ return shlex.quote(s)
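+
+# Illustrative doctest-style example (not upstream code): the backslash is
+# doubled first, then shell quoting is applied:
+# >>> gcc_rsp_quote(r'a b\c')
+# "'a b\\\\c'"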
+
+# How ninja executes command lines differs between Unix and Windows
+# (see https://ninja-build.org/manual.html#ref_rule_command)
+if mesonlib.is_windows():
+ quote_func = cmd_quote
+ execute_wrapper = ['cmd', '/c'] # unused
+ rmfile_prefix = ['del', '/f', '/s', '/q', '{}', '&&']
+else:
+ quote_func = quote_arg
+ execute_wrapper = []
+ rmfile_prefix = ['rm', '-f', '{}', '&&']
+
+
+def get_rsp_threshold():
+ '''Return a conservative estimate of the commandline size in bytes
+ above which a response file should be used. May be overridden for
+ debugging by setting environment variable MESON_RSP_THRESHOLD.'''
+
+ if mesonlib.is_windows():
+ # Usually 32k, but some projects might use cmd.exe,
+ # and that has a limit of 8k.
+ limit = 8192
+ else:
+ # On Linux, ninja always passes the commandline as a single
+ # big string to /bin/sh, and the kernel limits the size of a
+ # single argument; see MAX_ARG_STRLEN
+ limit = 131072
+ # Be conservative
+ limit = limit / 2
+ return int(os.environ.get('MESON_RSP_THRESHOLD', limit))
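+
+# For debugging, the threshold can be forced down so that nearly every
+# rsp-capable command goes through a response file, e.g. (illustrative
+# invocation):
+# MESON_RSP_THRESHOLD=0 ninja -C builddir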
+
+# a conservative estimate of the command-line length limit
+rsp_threshold = get_rsp_threshold()
+
+# ninja variables whose value should remain unquoted. The value of these ninja
+# variables (or variables we use them in) is interpreted directly by ninja
+# (e.g. the value of the depfile variable is a pathname that ninja will read
+# from, etc.), so it must not be shell quoted.
+raw_names = {'DEPFILE_UNQUOTED', 'DESC', 'pool', 'description', 'targetdep', 'dyndep'}
+
+NINJA_QUOTE_BUILD_PAT = re.compile(r"[$ :\n]")
+NINJA_QUOTE_VAR_PAT = re.compile(r"[$ \n]")
+
+def ninja_quote(text: str, is_build_line=False) -> str:
+ if is_build_line:
+ quote_re = NINJA_QUOTE_BUILD_PAT
+ else:
+ quote_re = NINJA_QUOTE_VAR_PAT
+ # Fast path for when no quoting is necessary
+ if not quote_re.search(text):
+ return text
+ if '\n' in text:
+ errmsg = f'''Ninja does not support newlines in rules. The content was:
+
+{text}
+
+Please report this error with a test case to the Meson bug tracker.'''
+ raise MesonException(errmsg)
+ return quote_re.sub(r'$\g<0>', text)
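+
+# Illustrative doctest-style examples (not upstream code):
+# >>> ninja_quote('main.c')
+# 'main.c'
+# >>> ninja_quote('my file.c', is_build_line=True)
+# 'my$ file.c'
+# >>> ninja_quote('c:/x y', is_build_line=True)
+# 'c$:/x$ y'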
+
+class TargetDependencyScannerInfo:
+ def __init__(self, private_dir: str, source2object: T.Dict[str, str]):
+ self.private_dir = private_dir
+ self.source2object = source2object
+
+@unique
+class Quoting(Enum):
+ both = 0
+ notShell = 1
+ notNinja = 2
+ none = 3
+
+class NinjaCommandArg:
+ def __init__(self, s, quoting = Quoting.both):
+ self.s = s
+ self.quoting = quoting
+
+ def __str__(self):
+ return self.s
+
+ @staticmethod
+ def list(l, q):
+ return [NinjaCommandArg(i, q) for i in l]
+
+class NinjaComment:
+ def __init__(self, comment):
+ self.comment = comment
+
+ def write(self, outfile):
+ for l in self.comment.split('\n'):
+ outfile.write('# ')
+ outfile.write(l)
+ outfile.write('\n')
+ outfile.write('\n')
+
+class NinjaRule:
+ def __init__(self, rule, command, args, description,
+ rspable = False, deps = None, depfile = None, extra = None,
+ rspfile_quote_style: RSPFileSyntax = RSPFileSyntax.GCC):
+
+ def strToCommandArg(c):
+ if isinstance(c, NinjaCommandArg):
+ return c
+
+ # deal with common cases here, so we don't have to explicitly
+ # annotate the required quoting everywhere
+ if c == '&&':
+ # shell constructs shouldn't be shell quoted
+ return NinjaCommandArg(c, Quoting.notShell)
+ if c.startswith('$'):
+ var = re.search(r'\$\{?(\w*)\}?', c).group(1)
+ if var not in raw_names:
+ # ninja variables shouldn't be ninja quoted, and their value
+ # is already shell quoted
+ return NinjaCommandArg(c, Quoting.none)
+ else:
+ # shell quote the use of ninja variables whose value must
+ # not be shell quoted (as it also used by ninja)
+ return NinjaCommandArg(c, Quoting.notNinja)
+
+ return NinjaCommandArg(c)
+
+ self.name = rule
+ self.command = list(map(strToCommandArg, command)) # includes args which never go into a rspfile
+ self.args = list(map(strToCommandArg, args)) # args which will go into a rspfile, if used
+ self.description = description
+ self.deps = deps # depstyle 'gcc' or 'msvc'
+ self.depfile = depfile
+ self.extra = extra
+ self.rspable = rspable # if a rspfile can be used
+ self.refcount = 0
+ self.rsprefcount = 0
+ self.rspfile_quote_style = rspfile_quote_style
+
+ if self.depfile == '$DEPFILE':
+ self.depfile += '_UNQUOTED'
+
+ @staticmethod
+ def _quoter(x, qf = quote_func):
+ if isinstance(x, NinjaCommandArg):
+ if x.quoting == Quoting.none:
+ return x.s
+ elif x.quoting == Quoting.notNinja:
+ return qf(x.s)
+ elif x.quoting == Quoting.notShell:
+ return ninja_quote(x.s)
+ # fallthrough
+ return ninja_quote(qf(str(x)))
+
+ def write(self, outfile):
+ if self.rspfile_quote_style is RSPFileSyntax.MSVC:
+ rspfile_quote_func = cmd_quote
+ else:
+ rspfile_quote_func = gcc_rsp_quote
+
+ def rule_iter():
+ if self.refcount:
+ yield ''
+ if self.rsprefcount:
+ yield '_RSP'
+
+ for rsp in rule_iter():
+ outfile.write(f'rule {self.name}{rsp}\n')
+ if rsp == '_RSP':
+ outfile.write(' command = {} @$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command])))
+ outfile.write(' rspfile = $out.rsp\n')
+ outfile.write(' rspfile_content = {}\n'.format(' '.join([self._quoter(x, rspfile_quote_func) for x in self.args])))
+ else:
+ outfile.write(' command = {}\n'.format(' '.join([self._quoter(x) for x in (self.command + self.args)])))
+ if self.deps:
+ outfile.write(f' deps = {self.deps}\n')
+ if self.depfile:
+ outfile.write(f' depfile = {self.depfile}\n')
+ outfile.write(f' description = {self.description}\n')
+ if self.extra:
+ for l in self.extra.split('\n'):
+ outfile.write(' ')
+ outfile.write(l)
+ outfile.write('\n')
+ outfile.write('\n')
+
+ def length_estimate(self, infiles, outfiles, elems):
+ # determine variables
+ # this order of actions only approximates ninja's scoping rules, as
+ # documented at: https://ninja-build.org/manual.html#ref_scope
+ ninja_vars = {}
+ for e in elems:
+ (name, value) = e
+ ninja_vars[name] = value
+ ninja_vars['deps'] = self.deps
+ ninja_vars['depfile'] = self.depfile
+ ninja_vars['in'] = infiles
+ ninja_vars['out'] = outfiles
+
+ # expand variables in command
+ command = ' '.join([self._quoter(x) for x in self.command + self.args])
+ estimate = len(command)
+ for m in re.finditer(r'(\${\w+}|\$\w+)?[^$]*', command):
+ if m.start(1) != -1:
+ estimate -= m.end(1) - m.start(1) + 1
+ chunk = m.group(1)
+ if chunk[1] == '{':
+ chunk = chunk[2:-1]
+ else:
+ chunk = chunk[1:]
+ chunk = ninja_vars.get(chunk, []) # undefined ninja variables are empty
+ estimate += len(' '.join(chunk))
+
+ # determine command length
+ return estimate
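+
+ # Illustrative example (hypothetical rule, not upstream code): for a
+ # command ['gcc', '$ARGS', '$in', '-o', '$out'], the estimate expands
+ # $ARGS, $in and $out from the variables gathered above, approximating
+ # the length of e.g. 'gcc -O2 a.c -o a.o' for ARGS=['-O2'].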
+
+class NinjaBuildElement:
+ def __init__(self, all_outputs, outfilenames, rulename, infilenames, implicit_outs=None):
+ self.implicit_outfilenames = implicit_outs or []
+ if isinstance(outfilenames, str):
+ self.outfilenames = [outfilenames]
+ else:
+ self.outfilenames = outfilenames
+ assert isinstance(rulename, str)
+ self.rulename = rulename
+ if isinstance(infilenames, str):
+ self.infilenames = [infilenames]
+ else:
+ self.infilenames = infilenames
+ self.deps = OrderedSet()
+ self.orderdeps = OrderedSet()
+ self.elems = []
+ self.all_outputs = all_outputs
+
+ def add_dep(self, dep):
+ if isinstance(dep, list):
+ self.deps.update(dep)
+ else:
+ self.deps.add(dep)
+
+ def add_orderdep(self, dep):
+ if isinstance(dep, list):
+ self.orderdeps.update(dep)
+ else:
+ self.orderdeps.add(dep)
+
+ def add_item(self, name, elems):
+ # Always convert from GCC-style argument naming to the naming used by the
+ # current compiler. Also filter system include paths, deduplicate, etc.
+ if isinstance(elems, CompilerArgs):
+ elems = elems.to_native()
+ if isinstance(elems, str):
+ elems = [elems]
+ self.elems.append((name, elems))
+
+ if name == 'DEPFILE':
+ self.elems.append((name + '_UNQUOTED', elems))
+
+ def _should_use_rspfile(self):
+ # 'phony' is a rule built-in to ninja
+ if self.rulename == 'phony':
+ return False
+
+ if not self.rule.rspable:
+ return False
+
+ infilenames = ' '.join([ninja_quote(i, True) for i in self.infilenames])
+ outfilenames = ' '.join([ninja_quote(i, True) for i in self.outfilenames])
+
+ return self.rule.length_estimate(infilenames,
+ outfilenames,
+ self.elems) >= rsp_threshold
+
+ def count_rule_references(self):
+ if self.rulename != 'phony':
+ if self._should_use_rspfile():
+ self.rule.rsprefcount += 1
+ else:
+ self.rule.refcount += 1
+
+ def write(self, outfile):
+ self.check_outputs()
+ ins = ' '.join([ninja_quote(i, True) for i in self.infilenames])
+ outs = ' '.join([ninja_quote(i, True) for i in self.outfilenames])
+ implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames])
+ if implicit_outs:
+ implicit_outs = ' | ' + implicit_outs
+ use_rspfile = self._should_use_rspfile()
+ if use_rspfile:
+ rulename = self.rulename + '_RSP'
+ mlog.debug(f'Command line for building {self.outfilenames} is long, using a response file')
+ else:
+ rulename = self.rulename
+ line = f'build {outs}{implicit_outs}: {rulename} {ins}'
+ if len(self.deps) > 0:
+ line += ' | ' + ' '.join([ninja_quote(x, True) for x in sorted(self.deps)])
+ if len(self.orderdeps) > 0:
+ line += ' || ' + ' '.join([ninja_quote(x, True) for x in sorted(self.orderdeps)])
+ line += '\n'
+ # This is the only way I could find to make this work on all
+ # platforms including Windows command shell. Slash is a dir separator
+ # on Windows, too, so all characters are unambiguous and, more importantly,
+ # do not require quoting, unless explicitly specified, which is necessary for
+ # the csc compiler.
+ line = line.replace('\\', '/')
+ if mesonlib.is_windows():
+ # Support network paths as backslash, otherwise they are interpreted as
+ # arguments for compile/link commands when using MSVC
+ line = ' '.join(
+ (l.replace('//', '\\\\', 1) if l.startswith('//') else l)
+ for l in line.split(' ')
+ )
+ outfile.write(line)
+
+ if use_rspfile:
+ if self.rule.rspfile_quote_style is RSPFileSyntax.MSVC:
+ qf = cmd_quote
+ else:
+ qf = gcc_rsp_quote
+ else:
+ qf = quote_func
+
+ for e in self.elems:
+ (name, elems) = e
+ should_quote = name not in raw_names
+ line = f' {name} = '
+ newelems = []
+ for i in elems:
+ if not should_quote or i == '&&': # Hackety hack hack
+ newelems.append(ninja_quote(i))
+ else:
+ newelems.append(ninja_quote(qf(i)))
+ line += ' '.join(newelems)
+ line += '\n'
+ outfile.write(line)
+ outfile.write('\n')
+
+ def check_outputs(self):
+ for n in self.outfilenames:
+ if n in self.all_outputs:
+ raise MesonException(f'Multiple producers for Ninja target "{n}". Please rename your targets.')
+ self.all_outputs[n] = True
+
+class NinjaBackend(backends.Backend):
+
+ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]):
+ super().__init__(build, interpreter)
+ self.name = 'ninja'
+ self.ninja_filename = 'build.ninja'
+ self.fortran_deps = {}
+ self.all_outputs = {}
+ self.introspection_data = {}
+ self.created_llvm_ir_rule = PerMachine(False, False)
+
+ def create_target_alias(self, to_target):
+ # We need to use aliases for targets that might be used as directory
+ # names to workaround a Ninja bug that breaks `ninja -t clean`.
+ # This is used for 'reserved' targets such as 'test', 'install',
+ # 'benchmark', etc, and also for RunTargets.
+ # https://github.com/mesonbuild/meson/issues/1644
+ if not to_target.startswith('meson-'):
+ raise AssertionError(f'Invalid usage of create_target_alias with {to_target!r}')
+ from_target = to_target[len('meson-'):]
+ elem = NinjaBuildElement(self.all_outputs, from_target, 'phony', to_target)
+ self.add_build(elem)
+
+ def detect_vs_dep_prefix(self, tempfilename):
+ '''VS writes its dependency in a locale dependent format.
+ Detect the search prefix to use.'''
+ # TODO don't hard-code host
+ for compiler in self.environment.coredata.compilers.host.values():
+ # Have to detect the dependency format
+
+ # IFort on Windows is MSVC-like, but doesn't have /showincludes
+ if isinstance(compiler, FortranCompiler):
+ continue
+ if isinstance(compiler, PGICCompiler) and mesonlib.is_windows():
+ # for the purpose of this function, PGI doesn't act enough like MSVC
+ return open(tempfilename, 'a', encoding='utf-8')
+ if isinstance(compiler, VisualStudioLikeCompiler):
+ break
+ else:
+ # None of our compilers are MSVC, we're done.
+ return open(tempfilename, 'a', encoding='utf-8')
+ filename = os.path.join(self.environment.get_scratch_dir(),
+ 'incdetect.c')
+ with open(filename, 'w', encoding='utf-8') as f:
+ f.write(dedent('''\
+ #include<stdio.h>
+ int dummy;
+ '''))
+
+ # The output of cl dependency information is language
+ # and locale dependent. Any attempt at converting it to
+ # Python strings leads to failure. We _must_ do this detection
+ # in raw byte mode and write the result in raw bytes.
+ pc = subprocess.Popen(compiler.get_exelist() +
+ ['/showIncludes', '/c', 'incdetect.c'],
+ cwd=self.environment.get_scratch_dir(),
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (stdout, stderr) = pc.communicate()
+
+ # We want to match 'Note: including file: ' in the line
+ # 'Note: including file: d:\MyDir\include\stdio.h', however
+ # different locales have different messages with a different
+ # number of colons. Match up to the drive name 'd:\'.
+ # When used in cross compilation, the path separator is a
+ # forward slash rather than a backslash so handle both; i.e.
+ # the path is /MyDir/include/stdio.h.
+ # With certain cross compilation wrappings of MSVC, the paths
+ # use backslashes, but without the leading drive name, so
+ # allow the path to start with any path separator, i.e.
+ # \MyDir\include\stdio.h.
+ matchre = re.compile(rb"^(.*\s)([a-zA-Z]:\\|[\\\/]).*stdio.h$")
+
+ def detect_prefix(out):
+ for line in re.split(rb'\r?\n', out):
+ match = matchre.match(line)
+ if match:
+ with open(tempfilename, 'ab') as binfile:
+ binfile.write(b'msvc_deps_prefix = ' + match.group(1) + b'\n')
+ return open(tempfilename, 'a', encoding='utf-8')
+ return None
+
+ # Some cl wrappers (e.g. Squish Coco) output dependency info
+ # to stderr rather than stdout
+ result = detect_prefix(stdout) or detect_prefix(stderr)
+ if result:
+ return result
+
+ raise MesonException('Could not determine vs dep dependency prefix string.')
+
+ def generate(self):
+ ninja = environment.detect_ninja_command_and_version(log=True)
+ if need_setup_vsenv:
+ builddir = Path(self.environment.get_build_dir())
+ builddir = builddir.relative_to(Path.cwd())
+ meson_command = mesonlib.join_args(mesonlib.get_meson_command())
+ mlog.log()
+ mlog.log('Visual Studio environment is needed to run Ninja. It is recommended to use the Meson wrapper:')
+ mlog.log(f'{meson_command} compile -C {builddir}')
+ if ninja is None:
+ raise MesonException('Could not detect Ninja v1.8.2 or newer')
+ (self.ninja_command, self.ninja_version) = ninja
+ outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename)
+ tempfilename = outfilename + '~'
+ with open(tempfilename, 'w', encoding='utf-8') as outfile:
+ outfile.write(f'# This is the build file for project "{self.build.get_project()}"\n')
+ outfile.write('# It is autogenerated by the Meson build system.\n')
+ outfile.write('# Do not edit by hand.\n\n')
+ outfile.write('ninja_required_version = 1.8.2\n\n')
+
+ num_pools = self.environment.coredata.options[OptionKey('backend_max_links')].value
+ if num_pools > 0:
+ outfile.write(f'''pool link_pool
+ depth = {num_pools}
+
+''')
+
+ with self.detect_vs_dep_prefix(tempfilename) as outfile:
+ self.generate_rules()
+
+ self.build_elements = []
+ self.generate_phony()
+ self.add_build_comment(NinjaComment('Build rules for targets'))
+ for t in ProgressBar(self.build.get_targets().values(), desc='Generating targets'):
+ self.generate_target(t)
+ self.add_build_comment(NinjaComment('Test rules'))
+ self.generate_tests()
+ self.add_build_comment(NinjaComment('Install rules'))
+ self.generate_install()
+ self.generate_dist()
+ key = OptionKey('b_coverage')
+ if (key in self.environment.coredata.options and
+ self.environment.coredata.options[key].value):
+ self.add_build_comment(NinjaComment('Coverage rules'))
+ self.generate_coverage_rules()
+ self.add_build_comment(NinjaComment('Suffix'))
+ self.generate_utils()
+ self.generate_ending()
+
+ self.write_rules(outfile)
+ self.write_builds(outfile)
+
+ default = 'default all\n\n'
+ outfile.write(default)
+ # Only overwrite the old build file after the new one has been
+ # fully created.
+ os.replace(tempfilename, outfilename)
+ mlog.cmd_ci_include(outfilename) # For CI debugging
+ self.generate_compdb()
+
+ # http://clang.llvm.org/docs/JSONCompilationDatabase.html
+ def generate_compdb(self):
+ rules = []
+ # TODO: Rather than an explicit list here, rules could be marked in the
+ # rule store as being wanted in compdb
+ for for_machine in MachineChoice:
+ for lang in self.environment.coredata.compilers[for_machine]:
+ rules += [f"{rule}{ext}" for rule in [self.get_compiler_rule_name(lang, for_machine)]
+ for ext in ['', '_RSP']]
+ rules += [f"{rule}{ext}" for rule in [self.get_pch_rule_name(lang, for_machine)]
+ for ext in ['', '_RSP']]
+ compdb_options = ['-x'] if mesonlib.version_compare(self.ninja_version, '>=1.9') else []
+ ninja_compdb = self.ninja_command + ['-t', 'compdb'] + compdb_options + rules
+ builddir = self.environment.get_build_dir()
+ try:
+ jsondb = subprocess.check_output(ninja_compdb, cwd=builddir)
+ with open(os.path.join(builddir, 'compile_commands.json'), 'wb') as f:
+ f.write(jsondb)
+ except Exception:
+ mlog.warning('Could not create compilation database.')
+
+ # Get all generated headers. Any source file might need them so
+ # we need to add an order dependency to them.
+ def get_generated_headers(self, target):
+ if hasattr(target, 'cached_generated_headers'):
+ return target.cached_generated_headers
+ header_deps = []
+ # XXX: Why don't we add deps to CustomTarget headers here?
+ for genlist in target.get_generated_sources():
+ if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)):
+ continue
+ for src in genlist.get_outputs():
+ if self.environment.is_header(src):
+ header_deps.append(self.get_target_generated_dir(target, genlist, src))
+ if 'vala' in target.compilers and not isinstance(target, build.Executable):
+ vala_header = File.from_built_file(self.get_target_dir(target), target.vala_header)
+ header_deps.append(vala_header)
+ # Recurse and find generated headers
+ for dep in itertools.chain(target.link_targets, target.link_whole_targets):
+ if isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
+ header_deps += self.get_generated_headers(dep)
+ target.cached_generated_headers = header_deps
+ return header_deps
+
+ def get_target_generated_sources(self, target: build.BuildTarget) -> T.MutableMapping[str, File]:
+ """
+ Returns a dictionary with the keys being the path to the file
+ (relative to the build directory) of that type and the value
+ being the GeneratorList or CustomTarget that generated it.
+ """
+ srcs: T.MutableMapping[str, File] = OrderedDict()
+ for gensrc in target.get_generated_sources():
+ for s in gensrc.get_outputs():
+ f = self.get_target_generated_dir(target, gensrc, s)
+ srcs[f] = s
+ return srcs
+
+ def get_target_sources(self, target: build.BuildTarget) -> T.MutableMapping[str, File]:
+ srcs: T.MutableMapping[str, File] = OrderedDict()
+ for s in target.get_sources():
+ # BuildTarget sources are always mesonlib.File files which are
+ # either in the source root, or generated with configure_file and
+ # in the build root
+ if not isinstance(s, File):
+ raise InvalidArguments(f'All sources in target {s!r} must be of type mesonlib.File')
+ f = s.rel_to_builddir(self.build_to_src)
+ srcs[f] = s
+ return srcs
+
+ def get_target_source_can_unity(self, target, source):
+ if isinstance(source, File):
+ source = source.fname
+ if self.environment.is_llvm_ir(source) or \
+ self.environment.is_assembly(source):
+ return False
+ suffix = os.path.splitext(source)[1][1:].lower()
+ for lang in backends.LANGS_CANT_UNITY:
+ if lang not in target.compilers:
+ continue
+ if suffix in target.compilers[lang].file_suffixes:
+ return False
+ return True
+
+ def create_target_source_introspection(self, target: build.Target, comp: compilers.Compiler, parameters, sources, generated_sources):
+ '''
+ Adds the source file introspection information for a language of a target
+
+ Internal introspection storage format:
+ self.introspection_data = {
+ '<target ID>': {
+ <id tuple>: {
+ 'language': 'lang',
+ 'compiler': ['comp', 'exe', 'list'],
+ 'parameters': ['UNIQUE', 'parameter', 'list'],
+ 'sources': [],
+ 'generated_sources': [],
+ }
+ }
+ }
+ '''
+ tid = target.get_id()
+ lang = comp.get_language()
+ tgt = self.introspection_data[tid]
+ # Find an existing entry or create a new one
+ id_hash = (lang, tuple(parameters))
+ src_block = tgt.get(id_hash, None)
+ if src_block is None:
+ # Convert parameters
+ if isinstance(parameters, CompilerArgs):
+ parameters = parameters.to_native(copy=True)
+ parameters = comp.compute_parameters_with_absolute_paths(parameters, self.build_dir)
+ # The new entry
+ src_block = {
+ 'language': lang,
+ 'compiler': comp.get_exelist(),
+ 'parameters': parameters,
+ 'sources': [],
+ 'generated_sources': [],
+ }
+ tgt[id_hash] = src_block
+ # Make source files absolute
+ sources = [x.absolute_path(self.source_dir, self.build_dir) if isinstance(x, File) else os.path.normpath(os.path.join(self.build_dir, x))
+ for x in sources]
+ generated_sources = [x.absolute_path(self.source_dir, self.build_dir) if isinstance(x, File) else os.path.normpath(os.path.join(self.build_dir, x))
+ for x in generated_sources]
+ # Add the source files
+ src_block['sources'] += sources
+ src_block['generated_sources'] += generated_sources
+
+ def generate_target(self, target):
+ try:
+ if isinstance(target, build.BuildTarget):
+ os.makedirs(self.get_target_private_dir_abs(target))
+ except FileExistsError:
+ pass
+ if isinstance(target, build.CustomTarget):
+ self.generate_custom_target(target)
+ if isinstance(target, build.RunTarget):
+ self.generate_run_target(target)
+ compiled_sources = []
+ source2object = {}
+ name = target.get_id()
+ if name in self.processed_targets:
+ return
+ self.processed_targets.add(name)
+ # Initialize an empty introspection source list
+ self.introspection_data[name] = {}
+ # Generate rules for all dependency targets
+ self.process_target_dependencies(target)
+ # If target uses a language that cannot link to C objects,
+ # just generate for that language and return.
+ if isinstance(target, build.Jar):
+ self.generate_jar_target(target)
+ return
+ if target.uses_rust():
+ self.generate_rust_target(target)
+ return
+ if 'cs' in target.compilers:
+ self.generate_cs_target(target)
+ return
+ if 'swift' in target.compilers:
+ self.generate_swift_target(target)
+ return
+
+ # Pre-existing target C/C++ sources to be built; dict of full path to
+ # source relative to build root and the original File object.
+ target_sources: T.MutableMapping[str, File]
+
+ # GeneratedList and CustomTarget sources to be built; dict of the full
+ # path to source relative to build root and the generating target/list
+ generated_sources: T.MutableMapping[str, File]
+
+ # List of sources that have been transpiled from a DSL (like Vala) into
+ # a language that is handled below, such as C or C++
+ transpiled_sources: T.List[str]
+
+ if 'vala' in target.compilers:
+ # Sources consumed by valac are filtered out. These only contain
+ # C/C++ sources, objects, generated libs, and unknown sources now.
+ target_sources, generated_sources, \
+ transpiled_sources = self.generate_vala_compile(target)
+ elif 'cython' in target.compilers:
+ target_sources, generated_sources, \
+ transpiled_sources = self.generate_cython_transpile(target)
+ else:
+ target_sources = self.get_target_sources(target)
+ generated_sources = self.get_target_generated_sources(target)
+ transpiled_sources = []
+ self.scan_fortran_module_outputs(target)
+ # Generate rules for GeneratedLists
+ self.generate_generator_list_rules(target)
+
+ # Generate rules for building the remaining source files in this target
+ outname = self.get_target_filename(target)
+ obj_list = []
+ is_unity = self.is_unity(target)
+ header_deps = []
+ unity_src = []
+ unity_deps = [] # Generated sources that must be built before compiling a Unity target.
+ header_deps += self.get_generated_headers(target)
+
+ if is_unity:
+ # Warn about incompatible sources if a unity build is enabled
+ langs = set(target.compilers.keys())
+ langs_cant = langs.intersection(backends.LANGS_CANT_UNITY)
+ if langs_cant:
+ langs = ', '.join(langs_cant).upper()
+ langs_are = langs + (' are' if len(langs_cant) > 1 else ' is')
+ msg = f'{langs_are} not supported in Unity builds yet, so {langs} ' \
+ f'sources in the {target.name!r} target will be compiled normally'
+ mlog.log(mlog.red('FIXME'), msg)
+
+ # Get a list of all generated headers that will be needed while building
+ # this target's sources (generated sources and pre-existing sources).
+ # This will be set as dependencies of all the target's sources. At the
+ # same time, also deal with generated sources that need to be compiled.
+ generated_source_files = []
+ for rel_src in generated_sources.keys():
+ dirpart, fnamepart = os.path.split(rel_src)
+ raw_src = File(True, dirpart, fnamepart)
+ if self.environment.is_source(rel_src) and not self.environment.is_header(rel_src):
+ if is_unity and self.get_target_source_can_unity(target, rel_src):
+ unity_deps.append(raw_src)
+ abs_src = os.path.join(self.environment.get_build_dir(), rel_src)
+ unity_src.append(abs_src)
+ else:
+ generated_source_files.append(raw_src)
+ elif self.environment.is_object(rel_src):
+ obj_list.append(rel_src)
+ elif self.environment.is_library(rel_src) or modules.is_module_library(rel_src):
+ pass
+ else:
+ # Assume anything not specifically a source file is a header. This is because
+ # people generate files with weird suffixes (.inc, .fh) that they then include
+ # in their source files.
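+ # e.g. (illustrative) a custom_target() output named 'version.inc'
+ # ends up here and is treated as a header dependency.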
+ header_deps.append(raw_src)
+ # These are the generated source files that need to be built for use by
+ # this target. We create the Ninja build file elements for this here
+ # because we need `header_deps` to be fully generated in the above loop.
+ for src in generated_source_files:
+ if self.environment.is_llvm_ir(src):
+ o, s = self.generate_llvm_ir_compile(target, src)
+ else:
+ o, s = self.generate_single_compile(target, src, True,
+ order_deps=header_deps)
+ compiled_sources.append(s)
+ source2object[s] = o
+ obj_list.append(o)
+
+ use_pch = self.environment.coredata.options.get(OptionKey('b_pch'))
+ if use_pch and target.has_pch():
+ pch_objects = self.generate_pch(target, header_deps=header_deps)
+ else:
+ pch_objects = []
+
+ # Generate compilation targets for C sources generated from Vala
+ # sources. This can be extended to other $LANG->C compilers later if
+ # necessary. This needs to be a separate pass, at least for Vala,
+ # because `header_deps` must be fully populated first.
+ #
+ # Do not try to unity-build the generated c files from vala, as these
+ # often contain duplicate symbols and will fail to compile properly
+ vala_generated_source_files = []
+ for src in transpiled_sources:
+ dirpart, fnamepart = os.path.split(src)
+ raw_src = File(True, dirpart, fnamepart)
+ # Generated targets are ordered deps because they must exist
+ # before the sources that use them are compiled. After the first
+ # compile we get precise dependency info from dep files.
+ # This should work in all cases. If it does not, then just
+ # move them from orderdeps to proper deps.
+ if self.environment.is_header(src):
+ header_deps.append(raw_src)
+ else:
+ # We gather all these and generate compile rules below
+ # after `header_deps` (above) is fully generated
+ vala_generated_source_files.append(raw_src)
+ for src in vala_generated_source_files:
+ # Passing 'vala' here signifies that we want the compile
+ # arguments to be specialized for C code generated by
+ # valac. For instance, no warnings should be emitted.
+ o, s = self.generate_single_compile(target, src, 'vala', [], header_deps)
+ obj_list.append(o)
+
+ # Generate compile targets for all the pre-existing sources for this target
+ for src in target_sources.values():
+ if not self.environment.is_header(src):
+ if self.environment.is_llvm_ir(src):
+ o, s = self.generate_llvm_ir_compile(target, src)
+ obj_list.append(o)
+ elif is_unity and self.get_target_source_can_unity(target, src):
+ abs_src = os.path.join(self.environment.get_build_dir(),
+ src.rel_to_builddir(self.build_to_src))
+ unity_src.append(abs_src)
+ else:
+ o, s = self.generate_single_compile(target, src, False, [], header_deps)
+ obj_list.append(o)
+ compiled_sources.append(s)
+ source2object[s] = o
+
+ obj_list += self.flatten_object_list(target)
+ if is_unity:
+ for src in self.generate_unity_files(target, unity_src):
+ o, s = self.generate_single_compile(target, src, True, unity_deps + header_deps)
+ obj_list.append(o)
+ compiled_sources.append(s)
+ source2object[s] = o
+ linker, stdlib_args = self.determine_linker_and_stdlib_args(target)
+ if isinstance(target, build.StaticLibrary) and target.prelink:
+ final_obj_list = self.generate_prelink(target, obj_list)
+ else:
+ final_obj_list = obj_list
+ elem = self.generate_link(target, outname, final_obj_list, linker, pch_objects, stdlib_args=stdlib_args)
+ self.generate_dependency_scan_target(target, compiled_sources, source2object)
+ self.generate_shlib_aliases(target, self.get_target_dir(target))
+ self.add_build(elem)
+
+ def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool:
+ if mesonlib.version_compare(self.ninja_version, '<1.10.0'):
+ return False
+ if 'fortran' in target.compilers:
+ return True
+ if 'cpp' not in target.compilers:
+ return False
+ # Currently only the preview version of Visual Studio is supported.
+ cpp = target.compilers['cpp']
+ if cpp.get_id() != 'msvc':
+ return False
+ cppversion = self.environment.coredata.options[OptionKey('std', machine=target.for_machine, lang='cpp')].value
+ if cppversion not in ('latest', 'c++latest', 'vc++latest'):
+ return False
+ if not mesonlib.current_vs_supports_modules():
+ return False
+ if mesonlib.version_compare(cpp.version, '<19.28.28617'):
+ return False
+ return True
+
+ def generate_dependency_scan_target(self, target, compiled_sources, source2object):
+ if not self.should_use_dyndeps_for_target(target):
+ return
+ depscan_file = self.get_dep_scan_file_for(target)
+ pickle_base = target.name + '.dat'
+ pickle_file = os.path.join(self.get_target_private_dir(target), pickle_base).replace('\\', '/')
+ pickle_abs = os.path.join(self.get_target_private_dir_abs(target), pickle_base).replace('\\', '/')
+ rule_name = 'depscan'
+ scan_sources = self.select_sources_to_scan(compiled_sources)
+ elem = NinjaBuildElement(self.all_outputs, depscan_file, rule_name, scan_sources)
+ elem.add_item('picklefile', pickle_file)
+ scaninfo = TargetDependencyScannerInfo(self.get_target_private_dir(target), source2object)
+ with open(pickle_abs, 'wb') as p:
+ pickle.dump(scaninfo, p)
+ self.add_build(elem)
+
+ def select_sources_to_scan(self, compiled_sources):
+ # In practice this picks up C++ and Fortran files. If some other
+ # language requires scanning (possibly Java, to deal with inner class
+ # files), add it here.
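+ # For example (illustrative): 'mod.f90' and 'widget.cpp' are kept,
+ # while 'main.c' and 'helper.rs' are dropped.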
+ all_suffixes = set(compilers.lang_suffixes['cpp']) | set(compilers.lang_suffixes['fortran'])
+ selected_sources = []
+ for source in compiled_sources:
+ ext = os.path.splitext(source)[1][1:].lower()
+ if ext in all_suffixes:
+ selected_sources.append(source)
+ return selected_sources
+
+ def process_target_dependencies(self, target):
+ for t in target.get_dependencies():
+ if t.get_id() not in self.processed_targets:
+ self.generate_target(t)
+
+ def custom_target_generator_inputs(self, target):
+ for s in target.sources:
+ if isinstance(s, build.GeneratedList):
+ self.generate_genlist_for_target(s, target)
+
+ def unwrap_dep_list(self, target):
+ deps = []
+ for i in target.get_dependencies():
+ # FIXME, should not grab element at zero but rather expand all.
+ if isinstance(i, list):
+ i = i[0]
+ # Add a dependency on all the outputs of this target
+ for output in i.get_outputs():
+ deps.append(os.path.join(self.get_target_dir(i), output))
+ return deps
+
+ def generate_custom_target(self, target):
+ self.custom_target_generator_inputs(target)
+ (srcs, ofilenames, cmd) = self.eval_custom_target_command(target)
+ deps = self.unwrap_dep_list(target)
+ deps += self.get_custom_target_depend_files(target)
+ if target.build_always_stale:
+ deps.append('PHONY')
+ if target.depfile is None:
+ rulename = 'CUSTOM_COMMAND'
+ else:
+ rulename = 'CUSTOM_COMMAND_DEP'
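+ # Illustrative resulting build statements (paths hypothetical):
+ #   build out.c: CUSTOM_COMMAND gen.py            (no depfile)
+ #   build out.c: CUSTOM_COMMAND_DEP gen.py        (deps tracked via DEPFILE)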
+ elem = NinjaBuildElement(self.all_outputs, ofilenames, rulename, srcs)
+ elem.add_dep(deps)
+ for d in target.extra_depends:
+ # Add a dependency on all the outputs of this target
+ for output in d.get_outputs():
+ elem.add_dep(os.path.join(self.get_target_dir(d), output))
+
+ cmd, reason = self.as_meson_exe_cmdline(target.name, target.command[0], cmd[1:],
+ extra_bdeps=target.get_transitive_build_target_deps(),
+ capture=ofilenames[0] if target.capture else None,
+ feed=srcs[0] if target.feed else None,
+ env=target.env)
+ if reason:
+ cmd_type = f' (wrapped by meson {reason})'
+ else:
+ cmd_type = ''
+ if target.depfile is not None:
+ depfile = target.get_dep_outname(elem.infilenames)
+ rel_dfile = os.path.join(self.get_target_dir(target), depfile)
+ abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
+ os.makedirs(abs_pdir, exist_ok=True)
+ elem.add_item('DEPFILE', rel_dfile)
+ if target.console:
+ elem.add_item('pool', 'console')
+ elem.add_item('COMMAND', cmd)
+ elem.add_item('description', f'Generating {target.name} with a custom command{cmd_type}')
+ self.add_build(elem)
+ self.processed_targets.add(target.get_id())
+
+ def build_run_target_name(self, target):
+ if target.subproject != '':
+ subproject_prefix = f'{target.subproject}@@'
+ else:
+ subproject_prefix = ''
+ return f'{subproject_prefix}{target.name}'
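+ # e.g. (illustrative) target 'doc' in subproject 'libfoo' yields
+ # 'libfoo@@doc'; a top-level 'doc' stays 'doc'.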
+
+ def generate_run_target(self, target):
+ target_name = self.build_run_target_name(target)
+ if not target.command:
+ # This is an alias target, it has no command, it just depends on
+ # other targets.
+ elem = NinjaBuildElement(self.all_outputs, target_name, 'phony', [])
+ else:
+ target_env = self.get_run_target_env(target)
+ _, _, cmd = self.eval_custom_target_command(target)
+ meson_exe_cmd, reason = self.as_meson_exe_cmdline(target_name, target.command[0], cmd[1:],
+ force_serialize=True, env=target_env,
+ verbose=True)
+ cmd_type = f' (wrapped by meson {reason})'
+ internal_target_name = f'meson-{target_name}'
+ elem = NinjaBuildElement(self.all_outputs, internal_target_name, 'CUSTOM_COMMAND', [])
+ elem.add_item('COMMAND', meson_exe_cmd)
+ elem.add_item('description', f'Running external command {target.name}{cmd_type}')
+ elem.add_item('pool', 'console')
+ # Alias that runs the target defined above with the name the user specified
+ self.create_target_alias(internal_target_name)
+ deps = self.unwrap_dep_list(target)
+ deps += self.get_custom_target_depend_files(target)
+ elem.add_dep(deps)
+ self.add_build(elem)
+ self.processed_targets.add(target.get_id())
+
+ def generate_coverage_command(self, elem, outputs):
+ targets = self.build.get_targets().values()
+ use_llvm_cov = False
+ for target in targets:
+ if not hasattr(target, 'compilers'):
+ continue
+ for compiler in target.compilers.values():
+ if compiler.get_id() == 'clang' and not compiler.info.is_darwin():
+ use_llvm_cov = True
+ break
+ elem.add_item('COMMAND', self.environment.get_build_command() +
+ ['--internal', 'coverage'] +
+ outputs +
+ [self.environment.get_source_dir(),
+ os.path.join(self.environment.get_source_dir(),
+ self.build.get_subproject_dir()),
+ self.environment.get_build_dir(),
+ self.environment.get_log_dir()] +
+ (['--use_llvm_cov'] if use_llvm_cov else []))
+
+ def generate_coverage_rules(self):
+ e = NinjaBuildElement(self.all_outputs, 'meson-coverage', 'CUSTOM_COMMAND', 'PHONY')
+ self.generate_coverage_command(e, [])
+ e.add_item('description', 'Generates coverage reports')
+ self.add_build(e)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-coverage')
+ self.generate_coverage_legacy_rules()
+
+ def generate_coverage_legacy_rules(self):
+ e = NinjaBuildElement(self.all_outputs, 'meson-coverage-xml', 'CUSTOM_COMMAND', 'PHONY')
+ self.generate_coverage_command(e, ['--xml'])
+ e.add_item('description', 'Generates XML coverage report')
+ self.add_build(e)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-coverage-xml')
+
+ e = NinjaBuildElement(self.all_outputs, 'meson-coverage-text', 'CUSTOM_COMMAND', 'PHONY')
+ self.generate_coverage_command(e, ['--text'])
+ e.add_item('description', 'Generates text coverage report')
+ self.add_build(e)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-coverage-text')
+
+ e = NinjaBuildElement(self.all_outputs, 'meson-coverage-html', 'CUSTOM_COMMAND', 'PHONY')
+ self.generate_coverage_command(e, ['--html'])
+ e.add_item('description', 'Generates HTML coverage report')
+ self.add_build(e)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-coverage-html')
+
+ def generate_install(self):
+ self.create_install_data_files()
+ elem = NinjaBuildElement(self.all_outputs, 'meson-install', 'CUSTOM_COMMAND', 'PHONY')
+ elem.add_dep('all')
+ elem.add_item('DESC', 'Installing files.')
+ elem.add_item('COMMAND', self.environment.get_build_command() + ['install', '--no-rebuild'])
+ elem.add_item('pool', 'console')
+ self.add_build(elem)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-install')
+
+ def generate_tests(self):
+ self.serialize_tests()
+ cmd = self.environment.get_build_command(True) + ['test', '--no-rebuild']
+ if not self.environment.coredata.get_option(OptionKey('stdsplit')):
+ cmd += ['--no-stdsplit']
+ if self.environment.coredata.get_option(OptionKey('errorlogs')):
+ cmd += ['--print-errorlogs']
+ elem = NinjaBuildElement(self.all_outputs, 'meson-test', 'CUSTOM_COMMAND', ['all', 'PHONY'])
+ elem.add_item('COMMAND', cmd)
+ elem.add_item('DESC', 'Running all tests.')
+ elem.add_item('pool', 'console')
+ self.add_build(elem)
+ # Alias that runs the above-defined meson-test target
+ self.create_target_alias('meson-test')
+
+ # And then benchmarks.
+ cmd = self.environment.get_build_command(True) + [
+ 'test', '--benchmark', '--logbase',
+ 'benchmarklog', '--num-processes=1', '--no-rebuild']
+ elem = NinjaBuildElement(self.all_outputs, 'meson-benchmark', 'CUSTOM_COMMAND', ['all', 'PHONY'])
+ elem.add_item('COMMAND', cmd)
+ elem.add_item('DESC', 'Running benchmark suite.')
+ elem.add_item('pool', 'console')
+ self.add_build(elem)
+ # Alias that runs the above-defined meson-benchmark target
+ self.create_target_alias('meson-benchmark')
+
+ def generate_rules(self):
+ self.rules = []
+ self.ruledict = {}
+
+ self.add_rule_comment(NinjaComment('Rules for module scanning.'))
+ self.generate_scanner_rules()
+ self.add_rule_comment(NinjaComment('Rules for compiling.'))
+ self.generate_compile_rules()
+ self.add_rule_comment(NinjaComment('Rules for linking.'))
+ self.generate_static_link_rules()
+ self.generate_dynamic_link_rules()
+ self.add_rule_comment(NinjaComment('Other rules'))
+ # Ninja errors out if you have deps = gcc but no depfile, so we must
+ # have two rules for custom commands.
+ self.add_rule(NinjaRule('CUSTOM_COMMAND', ['$COMMAND'], [], '$DESC',
+ extra='restat = 1'))
+ self.add_rule(NinjaRule('CUSTOM_COMMAND_DEP', ['$COMMAND'], [], '$DESC',
+ deps='gcc', depfile='$DEPFILE',
+ extra='restat = 1'))
+
+ c = self.environment.get_build_command() + \
+ ['--internal',
+ 'regenerate',
+ self.environment.get_source_dir(),
+ self.environment.get_build_dir(),
+ '--backend',
+ 'ninja']
+ self.add_rule(NinjaRule('REGENERATE_BUILD',
+ c, [],
+ 'Regenerating build files.',
+ extra='generator = 1'))
+
+ def add_rule_comment(self, comment):
+ self.rules.append(comment)
+
+ def add_build_comment(self, comment):
+ self.build_elements.append(comment)
+
+ def add_rule(self, rule):
+ if rule.name in self.ruledict:
+ raise MesonException(f'Tried to add rule {rule.name} twice.')
+ self.rules.append(rule)
+ self.ruledict[rule.name] = rule
+
+ def add_build(self, build):
+ self.build_elements.append(build)
+
+ if build.rulename != 'phony':
+ # reference rule
+ if build.rulename in self.ruledict:
+ build.rule = self.ruledict[build.rulename]
+ else:
+ mlog.warning(f"build statement for {build.outfilenames} references non-existent rule {build.rulename}")
+
+ def write_rules(self, outfile):
+ for b in self.build_elements:
+ if isinstance(b, NinjaBuildElement):
+ b.count_rule_references()
+
+ for r in self.rules:
+ r.write(outfile)
+
+ def write_builds(self, outfile):
+ for b in ProgressBar(self.build_elements, desc='Writing build.ninja'):
+ b.write(outfile)
+
+ def generate_phony(self):
+ self.add_build_comment(NinjaComment('Phony build target, always out of date'))
+ elem = NinjaBuildElement(self.all_outputs, 'PHONY', 'phony', '')
+ self.add_build(elem)
+
+ def generate_jar_target(self, target):
+ fname = target.get_filename()
+ outname_rel = os.path.join(self.get_target_dir(target), fname)
+ src_list = target.get_sources()
+ class_list = []
+ compiler = target.compilers['java']
+ c = 'c'
+ m = 'm'
+ e = ''
+ f = 'f'
+ main_class = target.get_main_class()
+ if main_class != '':
+ e = 'e'
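+ # These single letters are jar(1) option flags, concatenated below into
+ # e.g. 'cmef' or 'cmf': 'c' create archive, 'm' include manifest,
+ # 'e' set entry point (only when a main class is given), 'f' write to
+ # the named file.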
+
+ # Add any generated Java sources to the source list
+ generated_sources = self.get_target_generated_sources(target)
+ gen_src_list = []
+ for rel_src in generated_sources.keys():
+ dirpart, fnamepart = os.path.split(rel_src)
+ raw_src = File(True, dirpart, fnamepart)
+ if rel_src.endswith('.java'):
+ gen_src_list.append(raw_src)
+
+ compile_args = self.determine_single_java_compile_args(target, compiler)
+ for src in src_list + gen_src_list:
+ plain_class_path = self.generate_single_java_compile(src, target, compiler, compile_args)
+ class_list.append(plain_class_path)
+ class_dep_list = [os.path.join(self.get_target_private_dir(target), i) for i in class_list]
+ manifest_path = os.path.join(self.get_target_private_dir(target), 'META-INF', 'MANIFEST.MF')
+ manifest_fullpath = os.path.join(self.environment.get_build_dir(), manifest_path)
+ os.makedirs(os.path.dirname(manifest_fullpath), exist_ok=True)
+ with open(manifest_fullpath, 'w', encoding='utf-8') as manifest:
+ if target.link_targets:
+ manifest.write('Class-Path: ')
+ cp_paths = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets]
+ manifest.write(' '.join(cp_paths))
+ manifest.write('\n')
+ jar_rule = 'java_LINKER'
+ commands = [c + m + e + f]
+ commands.append(manifest_path)
+ if e != '':
+ commands.append(main_class)
+ commands.append(self.get_target_filename(target))
+ # Java compilation can produce an arbitrary number of output
+ # class files for a single source file. Thus tell jar to just
+ # grab everything in the final package.
+ commands += ['-C', self.get_target_private_dir(target), '.']
+ elem = NinjaBuildElement(self.all_outputs, outname_rel, jar_rule, [])
+ elem.add_dep(class_dep_list)
+ elem.add_item('ARGS', commands)
+ self.add_build(elem)
+ # Create introspection information
+ self.create_target_source_introspection(target, compiler, compile_args, src_list, gen_src_list)
+
+ def generate_cs_resource_tasks(self, target):
+ args = []
+ deps = []
+ for r in target.resources:
+ rel_sourcefile = os.path.join(self.build_to_src, target.subdir, r)
+ if r.endswith('.resources'):
+ a = '-resource:' + rel_sourcefile
+ elif r.endswith('.txt') or r.endswith('.resx'):
+ ofilebase = os.path.splitext(os.path.basename(r))[0] + '.resources'
+ ofilename = os.path.join(self.get_target_private_dir(target), ofilebase)
+ elem = NinjaBuildElement(self.all_outputs, ofilename, "CUSTOM_COMMAND", rel_sourcefile)
+ elem.add_item('COMMAND', ['resgen', rel_sourcefile, ofilename])
+ elem.add_item('DESC', f'Compiling resource {rel_sourcefile}')
+ self.add_build(elem)
+ deps.append(ofilename)
+ a = '-resource:' + ofilename
+ else:
+ raise InvalidArguments(f'Unknown resource file {r}.')
+ args.append(a)
+ return args, deps
+
+ def generate_cs_target(self, target: build.BuildTarget):
+ buildtype = self.get_option_for_target(OptionKey('buildtype'), target)
+ fname = target.get_filename()
+ outname_rel = os.path.join(self.get_target_dir(target), fname)
+ src_list = target.get_sources()
+ compiler = target.compilers['cs']
+ rel_srcs = [os.path.normpath(s.rel_to_builddir(self.build_to_src)) for s in src_list]
+ deps = []
+ commands = compiler.compiler_args(target.extra_args.get('cs', []))
+ commands += compiler.get_buildtype_args(buildtype)
+ commands += compiler.get_optimization_args(self.get_option_for_target(OptionKey('optimization'), target))
+ commands += compiler.get_debug_args(self.get_option_for_target(OptionKey('debug'), target))
+ if isinstance(target, build.Executable):
+ commands.append('-target:exe')
+ elif isinstance(target, build.SharedLibrary):
+ commands.append('-target:library')
+ else:
+ raise MesonException('Unknown C# target type.')
+ (resource_args, resource_deps) = self.generate_cs_resource_tasks(target)
+ commands += resource_args
+ deps += resource_deps
+ commands += compiler.get_output_args(outname_rel)
+ for l in target.link_targets:
+ lname = os.path.join(self.get_target_dir(l), l.get_filename())
+ commands += compiler.get_link_args(lname)
+ deps.append(lname)
+ if '-g' in commands:
+ outputs = [outname_rel, outname_rel + '.mdb']
+ else:
+ outputs = [outname_rel]
+ generated_sources = self.get_target_generated_sources(target)
+ generated_rel_srcs = []
+ for rel_src in generated_sources.keys():
+ if rel_src.lower().endswith('.cs'):
+ generated_rel_srcs.append(os.path.normpath(rel_src))
+ deps.append(os.path.normpath(rel_src))
+
+ for dep in target.get_external_deps():
+ commands.extend_direct(dep.get_link_args())
+ commands += self.build.get_project_args(compiler, target.subproject, target.for_machine)
+ commands += self.build.get_global_args(compiler, target.for_machine)
+
+ elem = NinjaBuildElement(self.all_outputs, outputs, self.get_compiler_rule_name('cs', target.for_machine), rel_srcs + generated_rel_srcs)
+ elem.add_dep(deps)
+ elem.add_item('ARGS', commands)
+ self.add_build(elem)
+
+ self.generate_generator_list_rules(target)
+ self.create_target_source_introspection(target, compiler, commands, rel_srcs, generated_rel_srcs)
+
+ def determine_single_java_compile_args(self, target, compiler):
+ args = []
+ args += compiler.get_buildtype_args(self.get_option_for_target(OptionKey('buildtype'), target))
+ args += self.build.get_global_args(compiler, target.for_machine)
+ args += self.build.get_project_args(compiler, target.subproject, target.for_machine)
+ args += target.get_java_args()
+ args += compiler.get_output_args(self.get_target_private_dir(target))
+ args += target.get_classpath_args()
+ curdir = target.get_subdir()
+ sourcepath = os.path.join(self.build_to_src, curdir) + os.pathsep
+ sourcepath += os.path.normpath(curdir) + os.pathsep
+ for i in target.include_dirs:
+ for idir in i.get_incdirs():
+ sourcepath += os.path.join(self.build_to_src, i.curdir, idir) + os.pathsep
+ args += ['-sourcepath', sourcepath]
+ return args
+
+ def generate_single_java_compile(self, src, target, compiler, args):
+ deps = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets]
+ generated_sources = self.get_target_generated_sources(target)
+ for rel_src in generated_sources.keys():
+ if rel_src.endswith('.java'):
+ deps.append(rel_src)
+ rel_src = src.rel_to_builddir(self.build_to_src)
+ plain_class_path = src.fname[:-4] + 'class'
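+ # e.g. (illustrative) 'com/example/Foo.java' -> 'com/example/Foo.class'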
+ rel_obj = os.path.join(self.get_target_private_dir(target), plain_class_path)
+ element = NinjaBuildElement(self.all_outputs, rel_obj, self.compiler_to_rule_name(compiler), rel_src)
+ element.add_dep(deps)
+ element.add_item('ARGS', args)
+ self.add_build(element)
+ return plain_class_path
+
+ def generate_java_link(self):
+ rule = 'java_LINKER'
+ command = ['jar', '$ARGS']
+ description = 'Creating JAR $out'
+ self.add_rule(NinjaRule(rule, command, [], description))
+
+ def determine_dep_vapis(self, target):
+ """
+ Peek into the sources of BuildTargets we're linking with, and if any of
+ them was built with Vala, assume that it also generated a .vapi file of
+ the same name as the BuildTarget and return the path to it relative to
+ the build directory.
+ """
+ result = OrderedSet()
+ for dep in itertools.chain(target.link_targets, target.link_whole_targets):
+ if not dep.is_linkable_target():
+ continue
+ for i in dep.sources:
+ if hasattr(i, 'fname'):
+ i = i.fname
+ if i.endswith('vala'):
+ vapiname = dep.vala_vapi
+ fullname = os.path.join(self.get_target_dir(dep), vapiname)
+ result.add(fullname)
+ break
+ return list(result)
+
+ def split_vala_sources(self, t: build.BuildTarget) -> \
+ T.Tuple[T.MutableMapping[str, File], T.MutableMapping[str, File],
+ T.Tuple[T.MutableMapping[str, File], T.MutableMapping[str, File]]]:
+ """
+ Splits the target's sources into .vala, .gs, .vapi, and other sources.
+ Handles both pre-existing and generated sources.
+
+ Returns a tuple (vala, vapi, others) each of which is a dictionary with
+ the keys being the path to the file (relative to the build directory)
+ and the value being the object that generated or represents the file.
+ """
+ vala: T.MutableMapping[str, File] = OrderedDict()
+ vapi: T.MutableMapping[str, File] = OrderedDict()
+ others: T.MutableMapping[str, File] = OrderedDict()
+ othersgen: T.MutableMapping[str, File] = OrderedDict()
+ # Split pre-existing sources
+ for s in t.get_sources():
+ # BuildTarget sources are always mesonlib.File files which are
+ # either in the source root, or generated with configure_file and
+ # in the build root
+ if not isinstance(s, File):
+ raise InvalidArguments(f'All sources in target {t!r} must be of type mesonlib.File, not {s!r}')
+ f = s.rel_to_builddir(self.build_to_src)
+ if s.endswith(('.vala', '.gs')):
+ srctype = vala
+ elif s.endswith('.vapi'):
+ srctype = vapi
+ else:
+ srctype = others
+ srctype[f] = s
+ # Split generated sources
+ for gensrc in t.get_generated_sources():
+ for s in gensrc.get_outputs():
+ f = self.get_target_generated_dir(t, gensrc, s)
+ if s.endswith(('.vala', '.gs')):
+ srctype = vala
+ elif s.endswith('.vapi'):
+ srctype = vapi
+ # Generated non-Vala (C/C++) sources. Won't be used for
+ # generating the Vala compile rule below.
+ else:
+ srctype = othersgen
+ # Duplicate outputs are disastrous
+ if f in srctype and srctype[f] is not gensrc:
+ msg = 'Duplicate output {0!r} from {1!r} {2!r}; ' \
+ 'conflicts with {0!r} from {4!r} {3!r}' \
+ ''.format(f, type(gensrc).__name__, gensrc.name,
+ srctype[f].name, type(srctype[f]).__name__)
+ raise InvalidArguments(msg)
+ # Store 'somefile.vala': GeneratedList (or CustomTarget)
+ srctype[f] = gensrc
+ return vala, vapi, (others, othersgen)
+
+ def generate_vala_compile(self, target: build.BuildTarget) -> \
+ T.Tuple[T.MutableMapping[str, File], T.MutableMapping[str, File], T.List[str]]:
+ """Vala is compiled into C. Set up all necessary build steps here."""
+ (vala_src, vapi_src, other_src) = self.split_vala_sources(target)
+ extra_dep_files = []
+ if not vala_src:
+ raise InvalidArguments(f'Vala library {target.name!r} has no Vala or Genie source files.')
+
+ valac = target.compilers['vala']
+ c_out_dir = self.get_target_private_dir(target)
+ # C files generated by valac
+ vala_c_src: T.List[str] = []
+ # Files generated by valac
+ valac_outputs: T.List = []
+ # All sources that are passed to valac on the commandline
+ all_files = list(vapi_src)
+ # Passed as --basedir
+ srcbasedir = os.path.join(self.build_to_src, target.get_subdir())
+ for (vala_file, gensrc) in vala_src.items():
+ all_files.append(vala_file)
+ # Figure out where the Vala compiler will write the compiled C file
+ #
+ # If the Vala file is in a subdir of the build dir (in our case
+ # because it was generated/built by something else), and is also
+ # a subdir of --basedir (because the builddir is in the source
+ # tree, and the target subdir is the source root), the subdir
+ # components from the source root till the private builddir will be
+ # duplicated inside the private builddir. Otherwise, just the
+ # basename will be used.
+ #
+ # If the Vala file is outside the build directory, the paths from
+ # the --basedir till the subdir will be duplicated inside the
+ # private builddir.
+ if isinstance(gensrc, (build.CustomTarget, build.GeneratedList)) or gensrc.is_built:
+ vala_c_file = os.path.splitext(os.path.basename(vala_file))[0] + '.c'
+ # Check if the vala file is in a subdir of --basedir
+ abs_srcbasedir = os.path.join(self.environment.get_source_dir(), target.get_subdir())
+ abs_vala_file = os.path.join(self.environment.get_build_dir(), vala_file)
+ if PurePath(os.path.commonpath((abs_srcbasedir, abs_vala_file))) == PurePath(abs_srcbasedir):
+ vala_c_subdir = PurePath(abs_vala_file).parent.relative_to(abs_srcbasedir)
+ vala_c_file = os.path.join(str(vala_c_subdir), vala_c_file)
+ else:
+ path_to_target = os.path.join(self.build_to_src, target.get_subdir())
+ if vala_file.startswith(path_to_target):
+ vala_c_file = os.path.splitext(os.path.relpath(vala_file, path_to_target))[0] + '.c'
+ else:
+ vala_c_file = os.path.splitext(os.path.basename(vala_file))[0] + '.c'
+ # All this will be placed inside the c_out_dir
+ vala_c_file = os.path.join(c_out_dir, vala_c_file)
+ vala_c_src.append(vala_c_file)
+ valac_outputs.append(vala_c_file)
+
+ args = self.generate_basic_compiler_args(target, valac)
+ args += valac.get_colorout_args(self.environment.coredata.options.get(OptionKey('b_colorout')).value)
+ # Tell Valac to output everything in our private directory. Sadly this
+ # means it will also preserve the directory components of Vala sources
+ # found inside the build tree (generated sources).
+ args += ['--directory', c_out_dir]
+ args += ['--basedir', srcbasedir]
+ if target.is_linkable_target():
+ # Library name
+ args += ['--library', target.name]
+ # The header valac will output
+ hname = os.path.join(self.get_target_dir(target), target.vala_header)
+ args += ['--header', hname]
+ if self.is_unity(target):
+ # Without this the declarations will get duplicated in the .c
+ # files and cause a build failure when all of them are
+ # #include-d in one .c file.
+ # https://github.com/mesonbuild/meson/issues/1969
+ args += ['--use-header']
+ valac_outputs.append(hname)
+ # The vapi file valac will output
+ vapiname = os.path.join(self.get_target_dir(target), target.vala_vapi)
+ # Force valac to write the vapi and gir files in the target build dir.
+ # Without this, it would write them inside c_out_dir
+ args += ['--vapi', os.path.join('..', target.vala_vapi)]
+ valac_outputs.append(vapiname)
+ target.outputs += [target.vala_header, target.vala_vapi]
+ # Install header and vapi to default locations if user requests this
+ if len(target.install_dir) > 1 and target.install_dir[1] is True:
+ target.install_dir[1] = self.environment.get_includedir()
+ if len(target.install_dir) > 2 and target.install_dir[2] is True:
+ target.install_dir[2] = os.path.join(self.environment.get_datadir(), 'vala', 'vapi')
+ # Generate GIR if requested
+ if isinstance(target.vala_gir, str):
+ girname = os.path.join(self.get_target_dir(target), target.vala_gir)
+ args += ['--gir', os.path.join('..', target.vala_gir)]
+ valac_outputs.append(girname)
+ target.outputs.append(target.vala_gir)
+ # Install GIR to default location if requested by user
+ if len(target.install_dir) > 3 and target.install_dir[3] is True:
+ target.install_dir[3] = os.path.join(self.environment.get_datadir(), 'gir-1.0')
+ # Detect gresources and add --gresources arguments for each
+ for gensrc in other_src[1].values():
+ if isinstance(gensrc, modules.GResourceTarget):
+ gres_xml, = self.get_custom_target_sources(gensrc)
+ args += ['--gresources=' + gres_xml]
+ extra_args = []
+
+ for a in target.extra_args.get('vala', []):
+ if isinstance(a, File):
+ relname = a.rel_to_builddir(self.build_to_src)
+ extra_dep_files.append(relname)
+ extra_args.append(relname)
+ else:
+ extra_args.append(a)
+ dependency_vapis = self.determine_dep_vapis(target)
+ extra_dep_files += dependency_vapis
+ args += extra_args
+ element = NinjaBuildElement(self.all_outputs, valac_outputs,
+ self.compiler_to_rule_name(valac),
+ all_files + dependency_vapis)
+ element.add_item('ARGS', args)
+ element.add_dep(extra_dep_files)
+ self.add_build(element)
+ self.create_target_source_introspection(target, valac, args, all_files, [])
+ return other_src[0], other_src[1], vala_c_src
+
+ def generate_cython_transpile(self, target: build.BuildTarget) -> \
+ T.Tuple[T.MutableMapping[str, File], T.MutableMapping[str, File], T.List[str]]:
+ """Generate rules for transpiling Cython files to C or C++
+
+ XXX: Currently only C is handled.
+ """
+ static_sources: T.MutableMapping[str, File] = OrderedDict()
+ generated_sources: T.MutableMapping[str, File] = OrderedDict()
+ cython_sources: T.List[str] = []
+
+ cython = target.compilers['cython']
+
+ opt_proxy = self.get_compiler_options_for_target(target)
+
+ args: T.List[str] = []
+ args += cython.get_always_args()
+ args += cython.get_buildtype_args(self.get_option_for_target(OptionKey('buildtype'), target))
+ args += cython.get_debug_args(self.get_option_for_target(OptionKey('debug'), target))
+ args += cython.get_optimization_args(self.get_option_for_target(OptionKey('optimization'), target))
+ args += cython.get_option_compile_args(opt_proxy)
+ args += self.build.get_global_args(cython, target.for_machine)
+ args += self.build.get_project_args(cython, target.subproject, target.for_machine)
+
+ for src in target.get_sources():
+ if src.endswith('.pyx'):
+ output = os.path.join(self.get_target_private_dir(target), f'{src}.c')
+ # Use a per-source copy so output args do not accumulate
+ # across source files.
+ src_args = args.copy()
+ src_args += cython.get_output_args(output)
+ element = NinjaBuildElement(
+ self.all_outputs, [output],
+ self.compiler_to_rule_name(cython),
+ [src.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir())])
+ element.add_item('ARGS', src_args)
+ self.add_build(element)
+ # TODO: introspection?
+ cython_sources.append(output)
+ else:
+ static_sources[src.rel_to_builddir(self.build_to_src)] = src
+
+ for gen in target.get_generated_sources():
+ for ssrc in gen.get_outputs():
+ if isinstance(gen, GeneratedList):
+ ssrc = os.path.join(self.get_target_private_dir(target), ssrc)
+ else:
+ ssrc = os.path.join(gen.get_subdir(), ssrc)
+ if ssrc.endswith('.pyx'):
+ output = os.path.join(self.get_target_private_dir(target), f'{ssrc}.c')
+ # Use a per-source copy, as above.
+ gen_args = args.copy()
+ gen_args += cython.get_output_args(output)
+ element = NinjaBuildElement(
+ self.all_outputs, [output],
+ self.compiler_to_rule_name(cython),
+ [ssrc])
+ element.add_item('ARGS', gen_args)
+ self.add_build(element)
+ # TODO: introspection?
+ cython_sources.append(output)
+ else:
+ generated_sources[ssrc] = mesonlib.File.from_built_file(gen.get_subdir(), ssrc)
+
+ return static_sources, generated_sources, cython_sources
+
+ def generate_rust_target(self, target: build.BuildTarget) -> None:
+ rustc = target.compilers['rust']
+ # Rust compiler takes only the main file as input and
+ # figures out what other files are needed via import
+ # statements and magic.
+ base_proxy = self.get_base_options_for_target(target)
+ args = rustc.compiler_args()
+ # Compiler args for compiling this target
+ args += compilers.get_base_compile_args(base_proxy, rustc)
+ self.generate_generator_list_rules(target)
+
+ # Dependencies need to cause a relink; they're not just for ordering.
+ deps = [os.path.join(t.subdir, t.get_filename()) for t in target.link_targets]
+
+ orderdeps: T.List[str] = []
+
+ main_rust_file = None
+ for i in target.get_sources():
+ if not rustc.can_compile(i):
+ raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.')
+ if main_rust_file is None:
+ main_rust_file = i.rel_to_builddir(self.build_to_src)
+ for g in target.get_generated_sources():
+ for i in g.get_outputs():
+ if not rustc.can_compile(i):
+ raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.')
+ if isinstance(g, GeneratedList):
+ fname = os.path.join(self.get_target_private_dir(target), i)
+ else:
+ fname = os.path.join(g.get_subdir(), i)
+ if main_rust_file is None:
+ main_rust_file = fname
+ orderdeps.append(fname)
+ if main_rust_file is None:
+ raise RuntimeError('A Rust target has no Rust sources. This is weird. Also a bug. Please report')
+ target_name = os.path.join(target.subdir, target.get_filename())
+ if isinstance(target, build.Executable):
+ cratetype = 'bin'
+ elif hasattr(target, 'rust_crate_type'):
+ cratetype = target.rust_crate_type
+ elif isinstance(target, build.SharedLibrary):
+ cratetype = 'dylib'
+ elif isinstance(target, build.StaticLibrary):
+ cratetype = 'rlib'
+ else:
+ raise InvalidArguments('Unknown target type for rustc.')
+ args.extend(['--crate-type', cratetype])
+
+ # If we're dynamically linking, add those arguments
+ #
+ # Rust is super annoying: calling '-C link-arg foo' does not work,
+ # it has to be '-C link-arg=foo'.
+ if cratetype in {'bin', 'dylib'}:
+ args.extend(rustc.get_linker_always_args())
+
+ opt_proxy = self.get_compiler_options_for_target(target)
+
+ args += ['--crate-name', target.name]
+ args += rustc.get_buildtype_args(self.get_option_for_target(OptionKey('buildtype'), target))
+ args += rustc.get_debug_args(self.get_option_for_target(OptionKey('debug'), target))
+ args += rustc.get_optimization_args(self.get_option_for_target(OptionKey('optimization'), target))
+ args += rustc.get_option_compile_args(opt_proxy)
+ args += self.build.get_global_args(rustc, target.for_machine)
+ args += self.build.get_project_args(rustc, target.subproject, target.for_machine)
+ depfile = os.path.join(target.subdir, target.name + '.d')
+ args += ['--emit', f'dep-info={depfile}', '--emit', 'link']
+ args += target.get_extra_args('rust')
+ args += rustc.get_output_args(os.path.join(target.subdir, target.get_filename()))
+ args += self.environment.coredata.get_external_args(target.for_machine, rustc.language)
+ linkdirs = mesonlib.OrderedSet()
+ external_deps = target.external_deps.copy()
+ for d in target.link_targets:
+ linkdirs.add(d.subdir)
+ if d.uses_rust():
+ # specify `extern CRATE_NAME=OUTPUT_FILE` for each Rust
+ # dependency, so that collisions with libraries in rustc's
+ # sysroot don't cause ambiguity
+ args += ['--extern', '{}={}'.format(d.name, os.path.join(d.subdir, d.filename))]
+ elif d.typename == 'static library':
+ # Rustc doesn't follow Meson's convention that static libraries
+ # are called .a, and import libraries are .lib, so we have to
+ # manually handle that.
+ if rustc.linker.id in {'link', 'lld-link'}:
+ args += ['-C', f'link-arg={self.get_target_filename_for_linking(d)}']
+ else:
+ args += ['-l', f'static={d.name}']
+ external_deps.extend(d.external_deps)
+ else:
+ # Rust uses -l for non rust dependencies, but we still need to
+ # add dylib=foo
+ args += ['-l', f'dylib={d.name}']
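+ # The raw link args of external deps are translated into rustc's
+ # '-l KIND=NAME' scheme below; e.g. (illustrative) '-lz' becomes
+ # ['-l', 'dylib=z'] (or 'static=z' for a static dep), and
+ # '/path/libfoo.so' becomes '-L /path' plus ['-l', 'dylib=foo'].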
+ for e in external_deps:
+ for a in e.get_link_args():
+ if a.endswith(('.dll', '.so', '.dylib')):
+ dir_, lib = os.path.split(a)
+ linkdirs.add(dir_)
+ lib, ext = os.path.splitext(lib)
+ if lib.startswith('lib'):
+ lib = lib[3:]
+ args.extend(['-l', f'dylib={lib}'])
+ elif a.startswith('-L'):
+ args.append(a)
+ elif a.startswith('-l'):
+ _type = 'static' if e.static else 'dylib'
+ args.extend(['-l', f'{_type}={a[2:]}'])
+ for d in linkdirs:
+ if d == '':
+ d = '.'
+ args += ['-L', d]
+ has_shared_deps = any(isinstance(dep, build.SharedLibrary) for dep in target.get_dependencies())
+ if isinstance(target, build.SharedLibrary) or has_shared_deps:
+ # add prefer-dynamic if any of the Rust libraries we link
+ # against are dynamic, otherwise we'll end up with
+ # multiple implementations of crates
+ args += ['-C', 'prefer-dynamic']
+
+ # build the usual rpath arguments as well...
+
+ # Set runtime-paths so we can run executables without needing to set
+ # LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows.
+ if has_path_sep(target.name):
+ # Target names really should not have slashes in them, but
+ # unfortunately we did not check for that and some downstream projects
+ # now have them. Once slashes are forbidden, remove this bit.
+ target_slashname_workaround_dir = os.path.join(os.path.dirname(target.name),
+ self.get_target_dir(target))
+ else:
+ target_slashname_workaround_dir = self.get_target_dir(target)
+ rpath_args, target.rpath_dirs_to_remove = (
+ rustc.build_rpath_args(self.environment,
+ self.environment.get_build_dir(),
+ target_slashname_workaround_dir,
+ self.determine_rpath_dirs(target),
+ target.build_rpath,
+ target.install_rpath))
+ # ... but then add rustc's sysroot to account for rustup
+ # installations
+ for rpath_arg in rpath_args:
+ args += ['-C', 'link-arg=' + rpath_arg + ':' + os.path.join(rustc.get_sysroot(), 'lib')]
+ compiler_name = self.get_compiler_rule_name('rust', target.for_machine)
+ element = NinjaBuildElement(self.all_outputs, target_name, compiler_name, main_rust_file)
+ if orderdeps:
+ element.add_orderdep(orderdeps)
+ if deps:
+ element.add_dep(deps)
+ element.add_item('ARGS', args)
+ element.add_item('targetdep', depfile)
+ element.add_item('cratetype', cratetype)
+ self.add_build(element)
+ if isinstance(target, build.SharedLibrary):
+ self.generate_shsym(target)
+ self.create_target_source_introspection(target, rustc, args, [main_rust_file], [])
+
+ @staticmethod
+ def get_rule_suffix(for_machine: MachineChoice) -> str:
+ return PerMachine('_FOR_BUILD', '')[for_machine]
+
+ @classmethod
+ def get_compiler_rule_name(cls, lang: str, for_machine: MachineChoice) -> str:
+ return '{}_COMPILER{}'.format(lang, cls.get_rule_suffix(for_machine))
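+ # e.g. 'c_COMPILER' for the host machine and 'c_COMPILER_FOR_BUILD'
+ # for the build machine (illustrative).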
+
+ @classmethod
+ def get_pch_rule_name(cls, lang: str, for_machine: MachineChoice) -> str:
+ return '{}_PCH{}'.format(lang, cls.get_rule_suffix(for_machine))
+
+ @classmethod
+ def compiler_to_rule_name(cls, compiler: Compiler) -> str:
+ return cls.get_compiler_rule_name(compiler.get_language(), compiler.for_machine)
+
+ @classmethod
+ def compiler_to_pch_rule_name(cls, compiler: Compiler) -> str:
+ return cls.get_pch_rule_name(compiler.get_language(), compiler.for_machine)
+
+ def swift_module_file_name(self, target):
+ return os.path.join(self.get_target_private_dir(target),
+ self.target_swift_modulename(target) + '.swiftmodule')
+
+ def target_swift_modulename(self, target):
+ return target.name
+
+ def determine_swift_dep_modules(self, target):
+ result = []
+ for l in target.link_targets:
+ if self.is_swift_target(l):
+ result.append(self.swift_module_file_name(l))
+ return result
+
+ def get_swift_link_deps(self, target):
+ result = []
+ for l in target.link_targets:
+ result.append(self.get_target_filename(l))
+ return result
+
+ def split_swift_generated_sources(self, target):
+ all_srcs = self.get_target_generated_sources(target)
+ srcs = []
+ others = []
+ for i in all_srcs:
+ if i.endswith('.swift'):
+ srcs.append(i)
+ else:
+ others.append(i)
+ return srcs, others
+
+ def generate_swift_target(self, target):
+ module_name = self.target_swift_modulename(target)
+ swiftc = target.compilers['swift']
+ abssrc = []
+ relsrc = []
+ abs_headers = []
+ header_imports = []
+ for i in target.get_sources():
+ if swiftc.can_compile(i):
+ rels = i.rel_to_builddir(self.build_to_src)
+ abss = os.path.normpath(os.path.join(self.environment.get_build_dir(), rels))
+ relsrc.append(rels)
+ abssrc.append(abss)
+ elif self.environment.is_header(i):
+ relh = i.rel_to_builddir(self.build_to_src)
+ absh = os.path.normpath(os.path.join(self.environment.get_build_dir(), relh))
+ abs_headers.append(absh)
+ header_imports += swiftc.get_header_import_args(absh)
+ else:
+ raise InvalidArguments(f'Swift target {target.get_basename()} contains a non-swift source file.')
+ os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True)
+ compile_args = swiftc.get_compile_only_args()
+ compile_args += swiftc.get_optimization_args(self.get_option_for_target(OptionKey('optimization'), target))
+ compile_args += swiftc.get_debug_args(self.get_option_for_target(OptionKey('debug'), target))
+ compile_args += swiftc.get_module_args(module_name)
+ compile_args += self.build.get_project_args(swiftc, target.subproject, target.for_machine)
+ compile_args += self.build.get_global_args(swiftc, target.for_machine)
+ for i in reversed(target.get_include_dirs()):
+ basedir = i.get_curdir()
+ for d in i.get_incdirs():
+ if d not in ('', '.'):
+ expdir = os.path.join(basedir, d)
+ else:
+ expdir = basedir
+ srctreedir = os.path.normpath(os.path.join(self.environment.get_build_dir(), self.build_to_src, expdir))
+ sargs = swiftc.get_include_args(srctreedir, False)
+ compile_args += sargs
+ link_args = swiftc.get_output_args(os.path.join(self.environment.get_build_dir(), self.get_target_filename(target)))
+ link_args += self.build.get_project_link_args(swiftc, target.subproject, target.for_machine)
+ link_args += self.build.get_global_link_args(swiftc, target.for_machine)
+ rundir = self.get_target_private_dir(target)
+ out_module_name = self.swift_module_file_name(target)
+ in_module_files = self.determine_swift_dep_modules(target)
+ abs_module_dirs = self.determine_swift_dep_dirs(target)
+ module_includes = []
+ for x in abs_module_dirs:
+ module_includes += swiftc.get_include_args(x, False)
+ link_deps = self.get_swift_link_deps(target)
+ abs_link_deps = [os.path.join(self.environment.get_build_dir(), x) for x in link_deps]
+ for d in target.link_targets:
+ reldir = self.get_target_dir(d)
+ if reldir == '':
+ reldir = '.'
+ link_args += ['-L', os.path.normpath(os.path.join(self.environment.get_build_dir(), reldir))]
+ (rel_generated, _) = self.split_swift_generated_sources(target)
+ abs_generated = [os.path.join(self.environment.get_build_dir(), x) for x in rel_generated]
+ # We need absolute paths because swiftc must be invoked in a subdir,
+ # and this is the easiest way to handle that.
+ objects = [] # Relative to swift invocation dir
+ rel_objects = [] # Relative to build.ninja
+ for i in abssrc + abs_generated:
+ base = os.path.basename(i)
+ oname = os.path.splitext(base)[0] + '.o'
+ objects.append(oname)
+ rel_objects.append(os.path.join(self.get_target_private_dir(target), oname))
+
+ rulename = self.get_compiler_rule_name('swift', target.for_machine)
+
+ # Swiftc does not seem to be able to emit objects and module files in one go.
+ elem = NinjaBuildElement(self.all_outputs, rel_objects, rulename, abssrc)
+ elem.add_dep(in_module_files + rel_generated)
+ elem.add_dep(abs_headers)
+ elem.add_item('ARGS', compile_args + header_imports + abs_generated + module_includes)
+ elem.add_item('RUNDIR', rundir)
+ self.add_build(elem)
+ elem = NinjaBuildElement(self.all_outputs, out_module_name,
+ self.get_compiler_rule_name('swift', target.for_machine),
+ abssrc)
+ elem.add_dep(in_module_files + rel_generated)
+ elem.add_item('ARGS', compile_args + abs_generated + module_includes + swiftc.get_mod_gen_args())
+ elem.add_item('RUNDIR', rundir)
+ self.add_build(elem)
+ if isinstance(target, build.StaticLibrary):
+ elem = self.generate_link(target, self.get_target_filename(target),
+ rel_objects, self.build.static_linker[target.for_machine])
+ self.add_build(elem)
+ elif isinstance(target, build.Executable):
+ elem = NinjaBuildElement(self.all_outputs, self.get_target_filename(target), rulename, [])
+ elem.add_dep(rel_objects)
+ elem.add_dep(link_deps)
+ elem.add_item('ARGS', link_args + swiftc.get_std_exe_link_args() + objects + abs_link_deps)
+ elem.add_item('RUNDIR', rundir)
+ self.add_build(elem)
+ else:
+ raise MesonException('Swift supports only executable and static library targets.')
+ # Introspection information
+ self.create_target_source_introspection(target, swiftc, compile_args + header_imports + module_includes, relsrc, rel_generated)
+
+ def _rsp_options(self, tool: T.Union['Compiler', 'StaticLinker', 'DynamicLinker']) -> T.Dict[str, T.Union[bool, RSPFileSyntax]]:
+ """Helper method to get rsp options.
+
+ rsp_file_syntax() is only guaranteed to be implemented if
+ can_linker_accept_rsp() returns True.
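+
+ A typical result (illustrative) is
+ {'rspable': True, 'rspfile_quote_style': RSPFileSyntax.GCC}.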
+ """
+ options = dict(rspable=tool.can_linker_accept_rsp())
+ if options['rspable']:
+ options['rspfile_quote_style'] = tool.rsp_file_syntax()
+ return options
+
+ def generate_static_link_rules(self):
+ num_pools = self.environment.coredata.options[OptionKey('backend_max_links')].value
+ if 'java' in self.environment.coredata.compilers.host:
+ self.generate_java_link()
+ for for_machine in MachineChoice:
+ static_linker = self.build.static_linker[for_machine]
+ if static_linker is None:
+ continue
+ rule = 'STATIC_LINKER{}'.format(self.get_rule_suffix(for_machine))
+ cmdlist = []
+ args = ['$in']
+ # FIXME: Must normalize file names with pathlib.Path before writing
+ # them out to fix this properly on Windows. See:
+ # https://github.com/mesonbuild/meson/issues/1517
+ # https://github.com/mesonbuild/meson/issues/1526
+ if isinstance(static_linker, ArLinker) and not mesonlib.is_windows():
+ # `ar` has no options to overwrite archives. It always appends,
+ # which is never what we want. Delete an existing library first if
+ # it exists. https://github.com/mesonbuild/meson/issues/1355
+ cmdlist = execute_wrapper + [c.format('$out') for c in rmfile_prefix]
+ cmdlist += static_linker.get_exelist()
+ cmdlist += ['$LINK_ARGS']
+ cmdlist += NinjaCommandArg.list(static_linker.get_output_args('$out'), Quoting.none)
+ description = 'Linking static target $out'
+ if num_pools > 0:
+ pool = 'pool = link_pool'
+ else:
+ pool = None
+
+ options = self._rsp_options(static_linker)
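+ # The assembled rule looks roughly like this for an ar-style linker
+ # on a POSIX platform (illustrative):
+ #   rule STATIC_LINKER
+ #     command = rm -f $out && ar $LINK_ARGS $out $in
+ #     description = Linking static target $out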
+ self.add_rule(NinjaRule(rule, cmdlist, args, description, **options, extra=pool))
+
+ def generate_dynamic_link_rules(self):
+ num_pools = self.environment.coredata.options[OptionKey('backend_max_links')].value
+ for for_machine in MachineChoice:
+ complist = self.environment.coredata.compilers[for_machine]
+ for langname, compiler in complist.items():
+ if langname in {'java', 'vala', 'rust', 'cs', 'cython'}:
+ continue
+ rule = '{}_LINKER{}'.format(langname, self.get_rule_suffix(for_machine))
+ command = compiler.get_linker_exelist()
+ args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_linker_output_args('$out'), Quoting.none) + ['$in', '$LINK_ARGS']
+ description = 'Linking target $out'
+ if num_pools > 0:
+ pool = 'pool = link_pool'
+ else:
+ pool = None
+
+ options = self._rsp_options(compiler)
+ self.add_rule(NinjaRule(rule, command, args, description, **options, extra=pool))
+
+ args = self.environment.get_build_command() + \
+ ['--internal',
+ 'symbolextractor',
+ self.environment.get_build_dir(),
+ '$in',
+ '$IMPLIB',
+ '$out']
+ symrule = 'SHSYM'
+ symcmd = args + ['$CROSS']
+ symdesc = 'Generating symbol file $out'
+ symstat = 'restat = 1'
+ self.add_rule(NinjaRule(symrule, symcmd, [], symdesc, extra=symstat))
+
+ def generate_java_compile_rule(self, compiler):
+ rule = self.compiler_to_rule_name(compiler)
+ command = compiler.get_exelist() + ['$ARGS', '$in']
+ description = 'Compiling Java object $in'
+ self.add_rule(NinjaRule(rule, command, [], description))
+
+ def generate_cs_compile_rule(self, compiler: 'CsCompiler') -> None:
+ rule = self.compiler_to_rule_name(compiler)
+ command = compiler.get_exelist()
+ args = ['$ARGS', '$in']
+ description = 'Compiling C Sharp target $out'
+ self.add_rule(NinjaRule(rule, command, args, description,
+ rspable=mesonlib.is_windows(),
+ rspfile_quote_style=compiler.rsp_file_syntax()))
+
+ def generate_vala_compile_rules(self, compiler):
+ rule = self.compiler_to_rule_name(compiler)
+ command = compiler.get_exelist() + ['$ARGS', '$in']
+ description = 'Compiling Vala source $in'
+ self.add_rule(NinjaRule(rule, command, [], description, extra='restat = 1'))
+
+ def generate_cython_compile_rules(self, compiler: 'Compiler') -> None:
+ rule = self.compiler_to_rule_name(compiler)
+ command = compiler.get_exelist() + ['$ARGS', '$in']
+ description = 'Compiling Cython source $in'
+ self.add_rule(NinjaRule(rule, command, [], description, extra='restat = 1'))
+
+ def generate_rust_compile_rules(self, compiler):
+ rule = self.compiler_to_rule_name(compiler)
+ command = compiler.get_exelist() + ['$ARGS', '$in']
+ description = 'Compiling Rust source $in'
+ depfile = '$targetdep'
+ depstyle = 'gcc'
+ self.add_rule(NinjaRule(rule, command, [], description, deps=depstyle,
+ depfile=depfile))
+
+ def generate_swift_compile_rules(self, compiler):
+ rule = self.compiler_to_rule_name(compiler)
+ full_exe = self.environment.get_build_command() + [
+ '--internal',
+ 'dirchanger',
+ '$RUNDIR',
+ ]
+ invoc = full_exe + compiler.get_exelist()
+ command = invoc + ['$ARGS', '$in']
+ description = 'Compiling Swift source $in'
+ self.add_rule(NinjaRule(rule, command, [], description))
+
+ def use_dyndeps_for_fortran(self) -> bool:
+ """Use the new Ninja feature for scanning dependencies during build,
+ rather than up front. Remove this and all old scanning code once Ninja
+ minimum version is bumped to 1.10."""
+ return mesonlib.version_compare(self.ninja_version, '>=1.10.0')
+
+ def generate_fortran_dep_hack(self, crstr: str) -> None:
+ if self.use_dyndeps_for_fortran():
+ return
+ rule = f'FORTRAN_DEP_HACK{crstr}'
+ if mesonlib.is_windows():
+ cmd = ['cmd', '/C']
+ else:
+ cmd = ['true']
+ self.add_rule_comment(NinjaComment('''Workaround for these issues:
+https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8
+https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
+ self.add_rule(NinjaRule(rule, cmd, [], 'Dep hack', extra='restat = 1'))
+
+ def generate_llvm_ir_compile_rule(self, compiler):
+ if self.created_llvm_ir_rule[compiler.for_machine]:
+ return
+ rule = self.get_compiler_rule_name('llvm_ir', compiler.for_machine)
+ command = compiler.get_exelist()
+ args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in']
+ description = 'Compiling LLVM IR object $in'
+
+ options = self._rsp_options(compiler)
+
+ self.add_rule(NinjaRule(rule, command, args, description, **options))
+ self.created_llvm_ir_rule[compiler.for_machine] = True
+
+ def generate_compile_rule_for(self, langname, compiler):
+ if langname == 'java':
+ if self.environment.machines.matches_build_machine(compiler.for_machine):
+ self.generate_java_compile_rule(compiler)
+ return
+ if langname == 'cs':
+ if self.environment.machines.matches_build_machine(compiler.for_machine):
+ self.generate_cs_compile_rule(compiler)
+ return
+ if langname == 'vala':
+ self.generate_vala_compile_rules(compiler)
+ return
+ if langname == 'rust':
+ self.generate_rust_compile_rules(compiler)
+ return
+ if langname == 'swift':
+ if self.environment.machines.matches_build_machine(compiler.for_machine):
+ self.generate_swift_compile_rules(compiler)
+ return
+ if langname == 'cython':
+ self.generate_cython_compile_rules(compiler)
+ return
+ crstr = self.get_rule_suffix(compiler.for_machine)
+ if langname == 'fortran':
+ self.generate_fortran_dep_hack(crstr)
+ rule = self.get_compiler_rule_name(langname, compiler.for_machine)
+ depargs = NinjaCommandArg.list(compiler.get_dependency_gen_args('$out', '$DEPFILE'), Quoting.none)
+ command = compiler.get_exelist()
+ args = ['$ARGS'] + depargs + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in']
+ description = f'Compiling {compiler.get_display_language()} object $out'
+ if isinstance(compiler, VisualStudioLikeCompiler):
+ deps = 'msvc'
+ depfile = None
+ else:
+ deps = 'gcc'
+ depfile = '$DEPFILE'
+ options = self._rsp_options(compiler)
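+ # For a gcc-like compiler the assembled rule is roughly (illustrative):
+ #   rule c_COMPILER
+ #     command = cc $ARGS -MD -MQ $out -MF $DEPFILE -o $out -c $in
+ #     deps = gcc
+ #     depfile = $DEPFILE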
+ self.add_rule(NinjaRule(rule, command, args, description, **options,
+ deps=deps, depfile=depfile))
+
+ def generate_pch_rule_for(self, langname, compiler):
+ if langname != 'c' and langname != 'cpp':
+ return
+ rule = self.compiler_to_pch_rule_name(compiler)
+ depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
+
+ if isinstance(compiler, VisualStudioLikeCompiler):
+ output = []
+ else:
+ output = NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none)
+ command = compiler.get_exelist() + ['$ARGS'] + depargs + output + compiler.get_compile_only_args() + ['$in']
+ description = 'Precompiling header $in'
+ if isinstance(compiler, VisualStudioLikeCompiler):
+ deps = 'msvc'
+ depfile = None
+ else:
+ deps = 'gcc'
+ depfile = '$DEPFILE'
+ self.add_rule(NinjaRule(rule, command, [], description, deps=deps,
+ depfile=depfile))
+
+ def generate_scanner_rules(self):
+ rulename = 'depscan'
+ if rulename in self.ruledict:
+ # Scanning command is the same for native and cross compilation.
+ return
+ command = self.environment.get_build_command() + \
+ ['--internal', 'depscan']
+ args = ['$picklefile', '$out', '$in']
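+ # Invoked roughly as (file names hypothetical):
+ #   meson --internal depscan mytarget.dat out.dd src1.cpp src2.f90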
+ description = 'Module scanner.'
+ rule = NinjaRule(rulename, command, args, description)
+ self.add_rule(rule)
+
+ def generate_compile_rules(self):
+ for for_machine in MachineChoice:
+ clist = self.environment.coredata.compilers[for_machine]
+ for langname, compiler in clist.items():
+ if compiler.get_id() == 'clang':
+ self.generate_llvm_ir_compile_rule(compiler)
+ self.generate_compile_rule_for(langname, compiler)
+ self.generate_pch_rule_for(langname, compiler)
+
+ def generate_generator_list_rules(self, target):
+ # CustomTargets have already written their rules and
+ # CustomTargetIndexes don't actually get generated, so write rules for
+ # GeneratedLists here
+ for genlist in target.get_generated_sources():
+ if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)):
+ continue
+ self.generate_genlist_for_target(genlist, target)
+
+ def replace_paths(self, target, args, override_subdir=None):
+ if override_subdir:
+ source_target_dir = os.path.join(self.build_to_src, override_subdir)
+ else:
+ source_target_dir = self.get_target_source_dir(target)
+ relout = self.get_target_private_dir(target)
+ args = [x.replace("@SOURCE_DIR@", self.build_to_src).replace("@BUILD_DIR@", relout)
+ for x in args]
+ args = [x.replace("@CURRENT_SOURCE_DIR@", source_target_dir) for x in args]
+ args = [x.replace("@SOURCE_ROOT@", self.build_to_src).replace("@BUILD_ROOT@", '.')
+ for x in args]
+ args = [x.replace('\\', '/') for x in args]
+ return args
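+
+    # Illustrative example with hypothetical values: if build_to_src is '..'
+    # and the target lives in 'sub', then
+    #   replace_paths(target, ['@SOURCE_ROOT@/x', '@CURRENT_SOURCE_DIR@/y'])
+    # yields ['../x', '../sub/y'], with any backslashes normalized to '/'.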
+
+ def generate_genlist_for_target(self, genlist, target):
+ generator = genlist.get_generator()
+ subdir = genlist.subdir
+ exe = generator.get_exe()
+ exe_arr = self.build_target_to_cmd_array(exe)
+ infilelist = genlist.get_inputs()
+ outfilelist = genlist.get_outputs()
+ extra_dependencies = self.get_custom_target_depend_files(genlist)
+        for i, curfile in enumerate(infilelist):
+ if len(generator.outputs) == 1:
+ sole_output = os.path.join(self.get_target_private_dir(target), outfilelist[i])
+ else:
+ sole_output = f'{curfile}'
+ infilename = curfile.rel_to_builddir(self.build_to_src)
+ base_args = generator.get_arglist(infilename)
+ outfiles = genlist.get_outputs_for(curfile)
+ outfiles = [os.path.join(self.get_target_private_dir(target), of) for of in outfiles]
+ if generator.depfile is None:
+ rulename = 'CUSTOM_COMMAND'
+ args = base_args
+ else:
+ rulename = 'CUSTOM_COMMAND_DEP'
+ depfilename = generator.get_dep_outname(infilename)
+ depfile = os.path.join(self.get_target_private_dir(target), depfilename)
+ args = [x.replace('@DEPFILE@', depfile) for x in base_args]
+ args = [x.replace("@INPUT@", infilename).replace('@OUTPUT@', sole_output)
+ for x in args]
+ args = self.replace_outputs(args, self.get_target_private_dir(target), outfilelist)
+ # We have consumed output files, so drop them from the list of remaining outputs.
+ if len(generator.outputs) > 1:
+ outfilelist = outfilelist[len(generator.outputs):]
+ args = self.replace_paths(target, args, override_subdir=subdir)
+ cmdlist = exe_arr + self.replace_extra_args(args, genlist)
+ cmdlist, reason = self.as_meson_exe_cmdline('generator ' + cmdlist[0],
+ cmdlist[0], cmdlist[1:],
+ capture=outfiles[0] if generator.capture else None)
+ abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
+ os.makedirs(abs_pdir, exist_ok=True)
+
+ elem = NinjaBuildElement(self.all_outputs, outfiles, rulename, infilename)
+ elem.add_dep([self.get_target_filename(x) for x in generator.depends])
+ if generator.depfile is not None:
+ elem.add_item('DEPFILE', depfile)
+ if len(extra_dependencies) > 0:
+ elem.add_dep(extra_dependencies)
+
+            if len(generator.outputs) == 1:
+                what = f'{sole_output!r}'
+            else:
+                # since there are multiple outputs, we log the source that caused the rebuild
+                what = f'from {sole_output!r}'
+            # Guard against a None/empty reason so we never emit a stray 'None'.
+            reason = f' (wrapped by meson {reason})' if reason else ''
+            elem.add_item('DESC', f'Generating {what}{reason}.')
+
+ if isinstance(exe, build.BuildTarget):
+ elem.add_dep(self.get_target_filename(exe))
+ elem.add_item('COMMAND', cmdlist)
+ self.add_build(elem)
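+
+    # A generated source typically lands in build.ninja as something like the
+    # following sketch (all names hypothetical; 'tgt.p' stands for the
+    # target's private dir):
+    #
+    #   build tgt.p/foo.c: CUSTOM_COMMAND_DEP ../foo.idl | bin/mygen
+    #     DEPFILE = tgt.p/foo.c.d
+    #     COMMAND = bin/mygen ../foo.idl -o tgt.p/foo.c
+    #     DESC = Generating 'tgt.p/foo.c'.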
+
+ def scan_fortran_module_outputs(self, target):
+ """
+        Find all modules and submodules made available in a Fortran code file.
+ """
+ if self.use_dyndeps_for_fortran():
+ return
+ compiler = None
+ # TODO other compilers
+ for lang, c in self.environment.coredata.compilers.host.items():
+ if lang == 'fortran':
+ compiler = c
+ break
+ if compiler is None:
+ self.fortran_deps[target.get_basename()] = {}
+ return
+
+ modre = re.compile(FORTRAN_MODULE_PAT, re.IGNORECASE)
+ submodre = re.compile(FORTRAN_SUBMOD_PAT, re.IGNORECASE)
+ module_files = {}
+ submodule_files = {}
+ for s in target.get_sources():
+            # FIXME: this does not work for Fortran sources generated by
+            # custom_target() or generator(), as those run after
+            # configuration (configure_file() is OK)
+ if not compiler.can_compile(s):
+ continue
+ filename = s.absolute_path(self.environment.get_source_dir(),
+ self.environment.get_build_dir())
+ # Fortran keywords must be ASCII.
+ with open(filename, encoding='ascii', errors='ignore') as f:
+ for line in f:
+ modmatch = modre.match(line)
+ if modmatch is not None:
+ modname = modmatch.group(1).lower()
+ if modname in module_files:
+ raise InvalidArguments(
+ f'Namespace collision: module {modname} defined in '
+                            f'two files {module_files[modname]} and {s}.')
+ module_files[modname] = s
+ else:
+ submodmatch = submodre.match(line)
+ if submodmatch is not None:
+ # '_' is arbitrarily used to distinguish submod from mod.
+ parents = submodmatch.group(1).lower().split(':')
+ submodname = parents[0] + '_' + submodmatch.group(2).lower()
+
+ if submodname in submodule_files:
+ raise InvalidArguments(
+                                    f'Namespace collision: submodule {submodname} defined in '
+                                    f'two files {submodule_files[submodname]} and {s}.')
+ submodule_files[submodname] = s
+
+ self.fortran_deps[target.get_basename()] = {**module_files, **submodule_files}
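+
+    # Example of the recorded mapping (a sketch): a source containing
+    #     module phys
+    #     submodule (phys) gravity
+    # is stored as {'phys': <src>, 'phys_gravity': <src>}, i.e. keys are
+    # lowercased and submodules join ancestor and name with '_'.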
+
+ def get_fortran_deps(self, compiler: FortranCompiler, src: Path, target) -> T.List[str]:
+ """
+        Find all modules and submodules needed by a Fortran target
+ """
+ if self.use_dyndeps_for_fortran():
+ return []
+
+ dirname = Path(self.get_target_private_dir(target))
+ tdeps = self.fortran_deps[target.get_basename()]
+ srcdir = Path(self.source_dir)
+
+ mod_files = _scan_fortran_file_deps(src, srcdir, dirname, tdeps, compiler)
+ return mod_files
+
+ def get_no_stdlib_args(self, target, compiler):
+ if compiler.language in self.build.stdlibs[target.for_machine]:
+ return compiler.get_no_stdinc_args()
+ return []
+
+ def get_no_stdlib_link_args(self, target, linker):
+ if hasattr(linker, 'language') and linker.language in self.build.stdlibs[target.for_machine]:
+ return linker.get_no_stdlib_link_args()
+ return []
+
+ def get_compile_debugfile_args(self, compiler, target, objfile):
+ # The way MSVC uses PDB files is documented exactly nowhere so
+ # the following is what we have been able to decipher via
+ # reverse engineering.
+ #
+ # Each object file gets the path of its PDB file written
+ # inside it. This can be either the final PDB (for, say,
+ # foo.exe) or an object pdb (for foo.obj). If the former, then
+ # each compilation step locks the pdb file for writing, which
+ # is a bottleneck and object files from one target can not be
+ # used in a different target. The latter seems to be the
+ # sensible one (and what Unix does) but there is a catch. If
+ # you try to use precompiled headers MSVC will error out
+ # because both source and pch pdbs go in the same file and
+ # they must be the same.
+ #
+ # This means:
+ #
+ # - pch files must be compiled anew for every object file (negating
+ # the entire point of having them in the first place)
+ # - when using pch, output must go to the target pdb
+ #
+ # Since both of these are broken in some way, use the one that
+ # works for each target. This unfortunately means that you
+ # can't combine pch and object extraction in a single target.
+ #
+ # PDB files also lead to filename collisions. A target foo.exe
+ # has a corresponding foo.pdb. A shared library foo.dll _also_
+ # has pdb file called foo.pdb. So will a static library
+ # foo.lib, which clobbers both foo.pdb _and_ the dll file's
+ # export library called foo.lib (by default, currently we name
+        # them libfoo.a to avoid this issue). You can give the files
+ # unique names such as foo_exe.pdb but VC also generates a
+ # bunch of other files which take their names from the target
+ # basename (i.e. "foo") and stomp on each other.
+ #
+ # CMake solves this problem by doing two things. First of all
+ # static libraries do not generate pdb files at
+ # all. Presumably you don't need them and VC is smart enough
+ # to look up the original data when linking (speculation, not
+        # tested). The second solution is that you can only have a
+        # target named "foo" as an exe, shared lib _or_ static
+        # lib. This prevents filename collisions. The downside
+ # is that you can't have an executable foo that uses a shared
+ # library libfoo.so, which is a common idiom on Unix.
+ #
+ # If you feel that the above is completely wrong and all of
+ # this is actually doable, please send patches.
+
+ if target.has_pch():
+ tfilename = self.get_target_filename_abs(target)
+ return compiler.get_compile_debugfile_args(tfilename, pch=True)
+ else:
+ return compiler.get_compile_debugfile_args(objfile, pch=False)
+
+ def get_link_debugfile_name(self, linker, target, outname):
+ return linker.get_link_debugfile_name(outname)
+
+ def get_link_debugfile_args(self, linker, target, outname):
+ return linker.get_link_debugfile_args(outname)
+
+ def generate_llvm_ir_compile(self, target, src):
+ base_proxy = self.get_base_options_for_target(target)
+ compiler = get_compiler_for_source(target.compilers.values(), src)
+ commands = compiler.compiler_args()
+ # Compiler args for compiling this target
+ commands += compilers.get_base_compile_args(base_proxy, compiler)
+ if isinstance(src, File):
+ if src.is_built:
+ src_filename = os.path.join(src.subdir, src.fname)
+ else:
+ src_filename = src.fname
+ elif os.path.isabs(src):
+ src_filename = os.path.basename(src)
+ else:
+ src_filename = src
+ obj_basename = self.canonicalize_filename(src_filename)
+ rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename)
+ rel_obj += '.' + self.environment.machines[target.for_machine].get_object_suffix()
+ commands += self.get_compile_debugfile_args(compiler, target, rel_obj)
+ if isinstance(src, File) and src.is_built:
+ rel_src = src.fname
+ elif isinstance(src, File):
+ rel_src = src.rel_to_builddir(self.build_to_src)
+ else:
+ raise InvalidArguments(f'Invalid source type: {src!r}')
+ # Write the Ninja build command
+ compiler_name = self.get_compiler_rule_name('llvm_ir', compiler.for_machine)
+ element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src)
+ element.add_item('ARGS', commands)
+ self.add_build(element)
+ return (rel_obj, rel_src)
+
+ @lru_cache(maxsize=None)
+ def generate_inc_dir(self, compiler: 'Compiler', d: str, basedir: str, is_system: bool) -> \
+ T.Tuple['ImmutableListProtocol[str]', 'ImmutableListProtocol[str]']:
+ # Avoid superfluous '/.' at the end of paths when d is '.'
+ if d not in ('', '.'):
+ expdir = os.path.normpath(os.path.join(basedir, d))
+ else:
+ expdir = basedir
+ srctreedir = os.path.normpath(os.path.join(self.build_to_src, expdir))
+ sargs = compiler.get_include_args(srctreedir, is_system)
+ # There may be include dirs where a build directory has not been
+ # created for some source dir. For example if someone does this:
+ #
+ # inc = include_directories('foo/bar/baz')
+ #
+ # But never subdir()s into the actual dir.
+ if os.path.isdir(os.path.join(self.environment.get_build_dir(), expdir)):
+ bargs = compiler.get_include_args(expdir, is_system)
+ else:
+ bargs = []
+ return (sargs, bargs)
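+
+    # For example (hypothetical layout): with d='inc' and basedir='sub',
+    # sargs are include args for '<build_to_src>/sub/inc' in the source tree,
+    # and bargs are args for 'sub/inc' only if that dir exists in the build tree.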
+
+    def _generate_single_compile(self, target: build.BuildTarget, compiler: 'Compiler',
+                                 is_generated: T.Union[bool, str] = False) -> 'CompilerArgs':
+ commands = self._generate_single_compile_base_args(target, compiler)
+ commands += self._generate_single_compile_target_args(target, compiler, is_generated)
+ return commands
+
+ def _generate_single_compile_base_args(self, target: build.BuildTarget, compiler: 'Compiler') -> 'CompilerArgs':
+ base_proxy = self.get_base_options_for_target(target)
+ # Create an empty commands list, and start adding arguments from
+ # various sources in the order in which they must override each other
+ commands = compiler.compiler_args()
+ # Start with symbol visibility.
+ commands += compiler.gnu_symbol_visibility_args(target.gnu_symbol_visibility)
+ # Add compiler args for compiling this target derived from 'base' build
+ # options passed on the command-line, in default_options, etc.
+ # These have the lowest priority.
+ commands += compilers.get_base_compile_args(base_proxy,
+ compiler)
+ return commands
+
+ @lru_cache(maxsize=None)
+    def _generate_single_compile_target_args(self, target: build.BuildTarget, compiler: 'Compiler',
+                                             is_generated: T.Union[bool, str] = False) -> 'ImmutableListProtocol[str]':
+ # The code generated by valac is usually crap and has tons of unused
+ # variables and such, so disable warnings for Vala C sources.
+ no_warn_args = (is_generated == 'vala')
+ # Add compiler args and include paths from several sources; defaults,
+ # build options, external dependencies, etc.
+ commands = self.generate_basic_compiler_args(target, compiler, no_warn_args)
+ # Add custom target dirs as includes automatically, but before
+ # target-specific include directories.
+ if target.implicit_include_directories:
+ commands += self.get_custom_target_dir_include_args(target, compiler)
+ # Add include dirs from the `include_directories:` kwarg on the target
+ # and from `include_directories:` of internal deps of the target.
+ #
+ # Target include dirs should override internal deps include dirs.
+ # This is handled in BuildTarget.process_kwargs()
+ #
+ # Include dirs from internal deps should override include dirs from
+ # external deps and must maintain the order in which they are specified.
+ # Hence, we must reverse the list so that the order is preserved.
+ for i in reversed(target.get_include_dirs()):
+ basedir = i.get_curdir()
+            # Iterate include dirs in reversed order, because each -Ipath is
+            # prepended to the argument array; without the reversal the flags
+            # would end up in reverse order.
+ for d in reversed(i.get_incdirs()):
+ # Add source subdir first so that the build subdir overrides it
+ (compile_obj, includeargs) = self.generate_inc_dir(compiler, d, basedir, i.is_system)
+ commands += compile_obj
+ commands += includeargs
+ for d in i.get_extra_build_dirs():
+ commands += compiler.get_include_args(d, i.is_system)
+ # Add per-target compile args, f.ex, `c_args : ['-DFOO']`. We set these
+ # near the end since these are supposed to override everything else.
+ commands += self.escape_extra_args(compiler,
+ target.get_extra_args(compiler.get_language()))
+
+ # D specific additional flags
+ if compiler.language == 'd':
+ commands += compiler.get_feature_args(target.d_features, self.build_to_src)
+
+ # Add source dir and build dir. Project-specific and target-specific
+ # include paths must override per-target compile args, include paths
+ # from external dependencies, internal dependencies, and from
+ # per-target `include_directories:`
+ #
+ # We prefer headers in the build dir over the source dir since, for
+ # instance, the user might have an srcdir == builddir Autotools build
+ # in their source tree. Many projects that are moving to Meson have
+ # both Meson and Autotools in parallel as part of the transition.
+ if target.implicit_include_directories:
+ commands += self.get_source_dir_include_args(target, compiler)
+ if target.implicit_include_directories:
+ commands += self.get_build_dir_include_args(target, compiler)
+ # Finally add the private dir for the target to the include path. This
+ # must override everything else and must be the final path added.
+ commands += compiler.get_include_args(self.get_target_private_dir(target), False)
+ return commands
+
+ def generate_single_compile(self, target, src, is_generated=False, header_deps=None, order_deps=None):
+ """
+ Compiles C/C++, ObjC/ObjC++, Fortran, and D sources
+ """
+ header_deps = header_deps if header_deps is not None else []
+ order_deps = order_deps if order_deps is not None else []
+
+ if isinstance(src, str) and src.endswith('.h'):
+ raise AssertionError(f'BUG: sources should not contain headers {src!r}')
+
+ compiler = get_compiler_for_source(target.compilers.values(), src)
+ commands = self._generate_single_compile_base_args(target, compiler)
+
+        # Include the PCH header first, as it must be the first include or it
+        # will be ignored by gcc: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=100462
+ if self.environment.coredata.options.get(OptionKey('b_pch')) and is_generated != 'pch':
+ commands += self.get_pch_include_args(compiler, target)
+
+ commands += self._generate_single_compile_target_args(target, compiler, is_generated)
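+        # The cached target args above are an immutable list, so re-wrap
+        # everything as a fresh, mutable CompilerArgs for this compiler.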
+ commands = commands.compiler.compiler_args(commands)
+
+ # Create introspection information
+ if is_generated is False:
+ self.create_target_source_introspection(target, compiler, commands, [src], [])
+ else:
+ self.create_target_source_introspection(target, compiler, commands, [], [src])
+
+ build_dir = self.environment.get_build_dir()
+ if isinstance(src, File):
+ rel_src = src.rel_to_builddir(self.build_to_src)
+ if os.path.isabs(rel_src):
+ # Source files may not be from the source directory if they originate in source-only libraries,
+ # so we can't assert that the absolute path is anywhere in particular.
+ if src.is_built:
+ assert rel_src.startswith(build_dir)
+ rel_src = rel_src[len(build_dir) + 1:]
+ elif is_generated:
+ raise AssertionError(f'BUG: broken generated source file handling for {src!r}')
+ else:
+ raise InvalidArguments(f'Invalid source type: {src!r}')
+ obj_basename = self.object_filename_from_source(target, src)
+ rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename)
+ dep_file = compiler.depfile_for_object(rel_obj)
+
+ # Add MSVC debug file generation compile flags: /Fd /FS
+ commands += self.get_compile_debugfile_args(compiler, target, rel_obj)
+
+ # PCH handling
+ if self.environment.coredata.options.get(OptionKey('b_pch')):
+ pchlist = target.get_pch(compiler.language)
+ else:
+ pchlist = []
+ if not pchlist:
+ pch_dep = []
+ elif compiler.id == 'intel':
+ pch_dep = []
+ else:
+            pch_dep = [os.path.join(self.get_target_private_dir(target),
+                                    compiler.get_pch_name(pchlist[0]))]
+
+ compiler_name = self.compiler_to_rule_name(compiler)
+ extra_deps = []
+ if compiler.get_language() == 'fortran':
+ # Can't read source file to scan for deps if it's generated later
+ # at build-time. Skip scanning for deps, and just set the module
+ # outdir argument instead.
+ # https://github.com/mesonbuild/meson/issues/1348
+ if not is_generated:
+ abs_src = Path(build_dir) / rel_src
+ extra_deps += self.get_fortran_deps(compiler, abs_src, target)
+ if not self.use_dyndeps_for_fortran():
+                # Dependency hack. Remove once multiple outputs in Ninja are fixed:
+ # https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8
+ for modname, srcfile in self.fortran_deps[target.get_basename()].items():
+ modfile = os.path.join(self.get_target_private_dir(target),
+ compiler.module_name_to_filename(modname))
+
+ if srcfile == src:
+ crstr = self.get_rule_suffix(target.for_machine)
+ depelem = NinjaBuildElement(self.all_outputs,
+ modfile,
+ 'FORTRAN_DEP_HACK' + crstr,
+ rel_obj)
+ self.add_build(depelem)
+ commands += compiler.get_module_outdir_args(self.get_target_private_dir(target))
+
+ element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src)
+ self.add_header_deps(target, element, header_deps)
+ for d in extra_deps:
+ element.add_dep(d)
+ for d in order_deps:
+ if isinstance(d, File):
+ d = d.rel_to_builddir(self.build_to_src)
+ elif not self.has_dir_part(d):
+ d = os.path.join(self.get_target_private_dir(target), d)
+ element.add_orderdep(d)
+ element.add_dep(pch_dep)
+ for i in self.get_fortran_orderdeps(target, compiler):
+ element.add_orderdep(i)
+ element.add_item('DEPFILE', dep_file)
+ element.add_item('ARGS', commands)
+
+ self.add_dependency_scanner_entries_to_element(target, compiler, element, src)
+ self.add_build(element)
+        assert isinstance(rel_obj, str)
+        assert isinstance(rel_src, str)
+ return (rel_obj, rel_src.replace('\\', '/'))
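+
+    # The build statement produced here looks roughly like this sketch for a
+    # hypothetical C source (the rule carries the actual compiler command):
+    #
+    #   build tgt.p/src_bar.c.o: c_COMPILER ../src/bar.c
+    #     DEPFILE = tgt.p/src_bar.c.o.d
+    #     ARGS = <the per-target compile args assembled above>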
+
+ def add_dependency_scanner_entries_to_element(self, target, compiler, element, src):
+ if not self.should_use_dyndeps_for_target(target):
+ return
+ extension = os.path.splitext(src.fname)[1][1:]
+ if not (extension.lower() in compilers.lang_suffixes['fortran'] or extension in compilers.lang_suffixes['cpp']):
+ return
+ dep_scan_file = self.get_dep_scan_file_for(target)
+ element.add_item('dyndep', dep_scan_file)
+ element.add_orderdep(dep_scan_file)
+
+ def get_dep_scan_file_for(self, target):
+ return os.path.join(self.get_target_private_dir(target), 'depscan.dd')
+
+ def add_header_deps(self, target, ninja_element, header_deps):
+ for d in header_deps:
+ if isinstance(d, File):
+ d = d.rel_to_builddir(self.build_to_src)
+ elif not self.has_dir_part(d):
+ d = os.path.join(self.get_target_private_dir(target), d)
+ ninja_element.add_dep(d)
+
+ def has_dir_part(self, fname):
+ # FIXME FIXME: The usage of this is a terrible and unreliable hack
+ if isinstance(fname, File):
+ return fname.subdir != ''
+ return has_path_sep(fname)
+
+    # Fortran is a bit weird (again). Merely compiling a source file that links against a library
+    # requires the mod files that are output when the library's own sources are compiled. Doing this
+    # right would need scanning all inputs and writing out explicit deps for each file, which is too
+    # slow and too much effort, so instead we just add an order-only dependency on the library. This
+    # ensures all required mod files are created before the source is compiled. The real deps are then
+    # detected via dep file generation from the compiler. This breaks on compilers that produce
+    # incorrect dep files, but such is life.
+ def get_fortran_orderdeps(self, target, compiler):
+ if compiler.language != 'fortran':
+ return []
+ return [
+ os.path.join(self.get_target_dir(lt), lt.get_filename())
+ for lt in itertools.chain(target.link_targets, target.link_whole_targets)
+ ]
+
+ def generate_msvc_pch_command(self, target, compiler, pch):
+ header = pch[0]
+ pchname = compiler.get_pch_name(header)
+ dst = os.path.join(self.get_target_private_dir(target), pchname)
+
+ commands = []
+ commands += self.generate_basic_compiler_args(target, compiler)
+
+ if len(pch) == 1:
+ # Auto generate PCH.
+ source = self.create_msvc_pch_implementation(target, compiler.get_language(), pch[0])
+ pch_header_dir = os.path.dirname(os.path.join(self.build_to_src, target.get_source_subdir(), header))
+ commands += compiler.get_include_args(pch_header_dir, False)
+ else:
+ source = os.path.join(self.build_to_src, target.get_source_subdir(), pch[1])
+
+ just_name = os.path.basename(header)
+ (objname, pch_args) = compiler.gen_pch_args(just_name, source, dst)
+ commands += pch_args
+ commands += self._generate_single_compile(target, compiler)
+ commands += self.get_compile_debugfile_args(compiler, target, objname)
+ dep = dst + '.' + compiler.get_depfile_suffix()
+ return commands, dep, dst, [objname], source
+
+ def generate_gcc_pch_command(self, target, compiler, pch):
+ commands = self._generate_single_compile(target, compiler)
+ if pch.split('.')[-1] == 'h' and compiler.language == 'cpp':
+ # Explicitly compile pch headers as C++. If Clang is invoked in C++ mode, it actually warns if
+ # this option is not set, and for gcc it also makes sense to use it.
+ commands += ['-x', 'c++-header']
+ dst = os.path.join(self.get_target_private_dir(target),
+ os.path.basename(pch) + '.' + compiler.get_pch_suffix())
+ dep = dst + '.' + compiler.get_depfile_suffix()
+ return commands, dep, dst, [] # Gcc does not create an object file during pch generation.
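+
+    # e.g. for a pch header 'pch/myexe.h' compiled with g++ this yields,
+    # roughly, ARGS plus '-x c++-header', output 'tgt.p/myexe.h.gch' and
+    # depfile 'tgt.p/myexe.h.gch.d' (names here are illustrative).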
+
+ def generate_pch(self, target, header_deps=None):
+ header_deps = header_deps if header_deps is not None else []
+ pch_objects = []
+ for lang in ['c', 'cpp']:
+ pch = target.get_pch(lang)
+ if not pch:
+ continue
+ if not has_path_sep(pch[0]) or not has_path_sep(pch[-1]):
+ msg = f'Precompiled header of {target.get_basename()!r} must not be in the same ' \
+ 'directory as source, please put it in a subdirectory.'
+ raise InvalidArguments(msg)
+ compiler = target.compilers[lang]
+ if isinstance(compiler, VisualStudioLikeCompiler):
+ (commands, dep, dst, objs, src) = self.generate_msvc_pch_command(target, compiler, pch)
+ extradep = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0])
+ elif compiler.id == 'intel':
+ # Intel generates on target generation
+ continue
+ else:
+ src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0])
+ (commands, dep, dst, objs) = self.generate_gcc_pch_command(target, compiler, pch[0])
+ extradep = None
+ pch_objects += objs
+ rulename = self.compiler_to_pch_rule_name(compiler)
+ elem = NinjaBuildElement(self.all_outputs, dst, rulename, src)
+ if extradep is not None:
+ elem.add_dep(extradep)
+ self.add_header_deps(target, elem, header_deps)
+ elem.add_item('ARGS', commands)
+ elem.add_item('DEPFILE', dep)
+ self.add_build(elem)
+ return pch_objects
+
+ def get_target_shsym_filename(self, target):
+ # Always name the .symbols file after the primary build output because it always exists
+ targetdir = self.get_target_private_dir(target)
+ return os.path.join(targetdir, target.get_filename() + '.symbols')
+
+ def generate_shsym(self, target):
+ target_file = self.get_target_filename(target)
+ symname = self.get_target_shsym_filename(target)
+ elem = NinjaBuildElement(self.all_outputs, symname, 'SHSYM', target_file)
+ # The library we will actually link to, which is an import library on Windows (not the DLL)
+ elem.add_item('IMPLIB', self.get_target_filename_for_linking(target))
+ if self.environment.is_cross_build():
+ elem.add_item('CROSS', '--cross-host=' + self.environment.machines[target.for_machine].system)
+ self.add_build(elem)
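+
+    # Sketch of the resulting edge: 'libfoo.so' -> 'tgt.p/libfoo.so.symbols'.
+    # Targets link-depend on the .symbols file rather than the library itself,
+    # so they are only relinked when the exported symbol list actually changes.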
+
+ def get_import_filename(self, target):
+ return os.path.join(self.get_target_dir(target), target.import_filename)
+
+ def get_target_type_link_args(self, target, linker):
+ commands = []
+ if isinstance(target, build.Executable):
+ # Currently only used with the Swift compiler to add '-emit-executable'
+ commands += linker.get_std_exe_link_args()
+ # If export_dynamic, add the appropriate linker arguments
+ if target.export_dynamic:
+ commands += linker.gen_export_dynamic_link_args(self.environment)
+            # If an import library is requested, and that's significant on this platform (i.e. Windows using either GCC or Visual Studio)
+ if target.import_filename:
+ commands += linker.gen_import_library_args(self.get_import_filename(target))
+ if target.pie:
+ commands += linker.get_pie_link_args()
+ elif isinstance(target, build.SharedLibrary):
+ if isinstance(target, build.SharedModule):
+ options = self.environment.coredata.options
+ commands += linker.get_std_shared_module_link_args(options)
+ else:
+ commands += linker.get_std_shared_lib_link_args()
+ # All shared libraries are PIC
+ commands += linker.get_pic_args()
+ # Add -Wl,-soname arguments on Linux, -install_name on OS X
+ commands += linker.get_soname_args(
+ self.environment, target.prefix, target.name, target.suffix,
+ target.soversion, target.darwin_versions,
+ isinstance(target, build.SharedModule))
+ # This is only visited when building for Windows using either GCC or Visual Studio
+ if target.vs_module_defs and hasattr(linker, 'gen_vs_module_defs_args'):
+ commands += linker.gen_vs_module_defs_args(target.vs_module_defs.rel_to_builddir(self.build_to_src))
+ # This is only visited when building for Windows using either GCC or Visual Studio
+ if target.import_filename:
+ commands += linker.gen_import_library_args(self.get_import_filename(target))
+ elif isinstance(target, build.StaticLibrary):
+ commands += linker.get_std_link_args()
+ else:
+ raise RuntimeError('Unknown build target type.')
+ return commands
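+
+    # For instance (a sketch): a SharedLibrary 'foo' with soversion '1' gets
+    # '-Wl,-soname,libfoo.so.1' from GNU-style linkers, and an equivalent
+    # '-install_name' argument when targeting macOS.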
+
+ def get_target_type_link_args_post_dependencies(self, target, linker):
+ commands = []
+ if isinstance(target, build.Executable):
+ # If gui_app is significant on this platform, add the appropriate linker arguments.
+ # Unfortunately this can't be done in get_target_type_link_args, because some misguided
+ # libraries (such as SDL2) add -mwindows to their link flags.
+ m = self.environment.machines[target.for_machine]
+
+ if m.is_windows() or m.is_cygwin():
+ if target.gui_app is not None:
+ commands += linker.get_gui_app_args(target.gui_app)
+ else:
+ commands += linker.get_win_subsystem_args(target.win_subsystem)
+ return commands
+
+ def get_link_whole_args(self, linker, target):
+ use_custom = False
+ if isinstance(linker, mixins.visualstudio.MSVCCompiler):
+ # Expand our object lists manually if we are on pre-Visual Studio 2015 Update 2
+ # (incidentally, the "linker" here actually refers to cl.exe)
+ if mesonlib.version_compare(linker.version, '<19.00.23918'):
+ use_custom = True
+
+ if use_custom:
+            objects_from_static_libs: T.List[str] = []
+ for dep in target.link_whole_targets:
+ l = dep.extract_all_objects(False)
+ objects_from_static_libs += self.determine_ext_objs(l, '')
+ objects_from_static_libs.extend(self.flatten_object_list(dep))
+
+ return objects_from_static_libs
+ else:
+ target_args = self.build_target_link_arguments(linker, target.link_whole_targets)
+ return linker.get_link_whole_for(target_args) if target_args else []
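+
+    # With a GNU-style linker the non-custom path expands to roughly
+    # '-Wl,--whole-archive <libs> -Wl,--no-whole-archive'; MSVC versions new
+    # enough to skip the custom path use '/WHOLEARCHIVE:<lib>' instead.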
+
+ @lru_cache(maxsize=None)
+    def guess_library_absolute_path(self, linker, libname, search_dirs, patterns) -> T.Optional[Path]:
+ for d in search_dirs:
+ for p in patterns:
+ trial = CCompiler._get_trials_from_pattern(p, d, libname)
+ if not trial:
+ continue
+ trial = CCompiler._get_file_from_list(self.environment, trial)
+ if not trial:
+ continue
+ # Return the first result
+ return trial
+
+ def guess_external_link_dependencies(self, linker, target, commands, internal):
+ # Ideally the linker would generate dependency information that could be used.
+ # But that has 2 problems:
+ # * currently ld can not create dependency information in a way that ninja can use:
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=22843
+ # * Meson optimizes libraries from the same build using the symbol extractor.
+ # Just letting ninja use ld generated dependencies would undo this optimization.
+ search_dirs = OrderedSet()
+ libs = OrderedSet()
+ absolute_libs = []
+
+ build_dir = self.environment.get_build_dir()
+ # the following loop sometimes consumes two items from command in one pass
+ it = iter(linker.native_args_to_unix(commands))
+ for item in it:
+ if item in internal and not item.startswith('-'):
+ continue
+
+ if item.startswith('-L'):
+ if len(item) > 2:
+ path = item[2:]
+ else:
+ try:
+ path = next(it)
+ except StopIteration:
+ mlog.warning("Generated linker command has -L argument without following path")
+ break
+ if not os.path.isabs(path):
+ path = os.path.join(build_dir, path)
+ search_dirs.add(path)
+ elif item.startswith('-l'):
+ if len(item) > 2:
+ lib = item[2:]
+ else:
+ try:
+ lib = next(it)
+ except StopIteration:
+ mlog.warning("Generated linker command has '-l' argument without following library name")
+ break
+ libs.add(lib)
+ elif os.path.isabs(item) and self.environment.is_library(item) and os.path.isfile(item):
+ absolute_libs.append(item)
+
+ guessed_dependencies = []
+ # TODO The get_library_naming requirement currently excludes link targets that use d or fortran as their main linker
+ try:
+ static_patterns = linker.get_library_naming(self.environment, LibType.STATIC, strict=True)
+ shared_patterns = linker.get_library_naming(self.environment, LibType.SHARED, strict=True)
+ search_dirs = tuple(search_dirs) + tuple(linker.get_library_dirs(self.environment))
+ for libname in libs:
+ # be conservative and record most likely shared and static resolution, because we don't know exactly
+ # which one the linker will prefer
+ staticlibs = self.guess_library_absolute_path(linker, libname,
+ search_dirs, static_patterns)
+ sharedlibs = self.guess_library_absolute_path(linker, libname,
+ search_dirs, shared_patterns)
+ if staticlibs:
+ guessed_dependencies.append(staticlibs.resolve().as_posix())
+ if sharedlibs:
+ guessed_dependencies.append(sharedlibs.resolve().as_posix())
+ except (mesonlib.MesonException, AttributeError) as e:
+ if 'get_library_naming' not in str(e):
+ raise
+
+ return guessed_dependencies + absolute_libs
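+
+    # Hypothetical example: for '-L/opt/mylibs -lbar' this records both
+    # /opt/mylibs/libbar.a and /opt/mylibs/libbar.so (whichever exist), so
+    # ninja relinks when either plausible resolution of '-lbar' changes.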
+
+ def generate_prelink(self, target, obj_list):
+        assert isinstance(target, build.StaticLibrary)
+ prelink_name = os.path.join(self.get_target_private_dir(target), target.name + '-prelink.o')
+ elem = NinjaBuildElement(self.all_outputs, [prelink_name], 'CUSTOM_COMMAND', obj_list)
+
+ prelinker = target.get_prelinker()
+ cmd = prelinker.exelist[:]
+ cmd += prelinker.get_prelink_args(prelink_name, obj_list)
+
+ cmd = self.replace_paths(target, cmd)
+ elem.add_item('COMMAND', cmd)
+ elem.add_item('description', f'Prelinking {prelink_name}.')
+ self.add_build(elem)
+ return [prelink_name]
+
+ def generate_link(self, target: build.BuildTarget, outname, obj_list, linker: T.Union['Compiler', 'StaticLinker'], extra_args=None, stdlib_args=None):
+ extra_args = extra_args if extra_args is not None else []
+ stdlib_args = stdlib_args if stdlib_args is not None else []
+ implicit_outs = []
+ if isinstance(target, build.StaticLibrary):
+ linker_base = 'STATIC'
+ else:
+ linker_base = linker.get_language() # Fixme.
+ if isinstance(target, build.SharedLibrary):
+ self.generate_shsym(target)
+ crstr = self.get_rule_suffix(target.for_machine)
+ linker_rule = linker_base + '_LINKER' + crstr
+ # Create an empty commands list, and start adding link arguments from
+ # various sources in the order in which they must override each other
+ # starting from hard-coded defaults followed by build options and so on.
+ #
+ # Once all the linker options have been passed, we will start passing
+ # libraries and library paths from internal and external sources.
+ commands = linker.compiler_args()
+ # First, the trivial ones that are impossible to override.
+ #
+ # Add linker args for linking this target derived from 'base' build
+ # options passed on the command-line, in default_options, etc.
+ # These have the lowest priority.
+ if isinstance(target, build.StaticLibrary):
+ commands += linker.get_base_link_args(self.get_base_options_for_target(target))
+ else:
+ commands += compilers.get_base_link_args(self.get_base_options_for_target(target),
+ linker,
+ isinstance(target, build.SharedModule))
+ # Add -nostdlib if needed; can't be overridden
+ commands += self.get_no_stdlib_link_args(target, linker)
+ # Add things like /NOLOGO; usually can't be overridden
+ commands += linker.get_linker_always_args()
+ # Add buildtype linker args: optimization level, etc.
+ commands += linker.get_buildtype_linker_args(self.get_option_for_target(OptionKey('buildtype'), target))
+ # Add /DEBUG and the pdb filename when using MSVC
+ if self.get_option_for_target(OptionKey('debug'), target):
+ commands += self.get_link_debugfile_args(linker, target, outname)
+ debugfile = self.get_link_debugfile_name(linker, target, outname)
+ if debugfile is not None:
+ implicit_outs += [debugfile]
+ # Add link args specific to this BuildTarget type, such as soname args,
+ # PIC, import library generation, etc.
+ commands += self.get_target_type_link_args(target, linker)
+ # Archives that are copied wholesale in the result. Must be before any
+ # other link targets so missing symbols from whole archives are found in those.
+ if not isinstance(target, build.StaticLibrary):
+ commands += self.get_link_whole_args(linker, target)
+
+ if not isinstance(target, build.StaticLibrary):
+ # Add link args added using add_project_link_arguments()
+ commands += self.build.get_project_link_args(linker, target.subproject, target.for_machine)
+ # Add link args added using add_global_link_arguments()
+ # These override per-project link arguments
+ commands += self.build.get_global_link_args(linker, target.for_machine)
+ # Link args added from the env: LDFLAGS. We want these to override
+ # all the defaults but not the per-target link args.
+ commands += self.environment.coredata.get_external_link_args(target.for_machine, linker.get_language())
+
+ # Now we will add libraries and library paths from various sources
+
+ # Set runtime-paths so we can run executables without needing to set
+ # LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows.
+ if has_path_sep(target.name):
+ # Target names really should not have slashes in them, but
+ # unfortunately we did not check for that and some downstream projects
+ # now have them. Once slashes are forbidden, remove this bit.
+ target_slashname_workaround_dir = os.path.join(
+ os.path.dirname(target.name),
+ self.get_target_dir(target))
+ else:
+ target_slashname_workaround_dir = self.get_target_dir(target)
+ (rpath_args, target.rpath_dirs_to_remove) = (
+ linker.build_rpath_args(self.environment,
+ self.environment.get_build_dir(),
+ target_slashname_workaround_dir,
+ self.determine_rpath_dirs(target),
+ target.build_rpath,
+ target.install_rpath))
+ commands += rpath_args
+
+ # Add link args to link to all internal libraries (link_with:) and
+ # internal dependencies needed by this target.
+ if linker_base == 'STATIC':
+ # Link arguments of static libraries are not put in the command
+ # line of the library. They are instead appended to the command
+ # line where the static library is used.
+ dependencies = []
+ else:
+ dependencies = target.get_dependencies()
+ internal = self.build_target_link_arguments(linker, dependencies)
+ commands += internal
+ # Only non-static built targets need link args and link dependencies
+ if not isinstance(target, build.StaticLibrary):
+ # For 'automagic' deps: Boost and GTest. Also dependency('threads').
+ # pkg-config puts the thread flags itself via `Cflags:`
+
+ commands += linker.get_target_link_args(target)
+ # External deps must be last because target link libraries may depend on them.
+ for dep in target.get_external_deps():
+ # Extend without reordering or de-dup to preserve `-L -l` sets
+ # https://github.com/mesonbuild/meson/issues/1718
+ commands.extend_preserving_lflags(linker.get_dependency_link_args(dep))
+ for d in target.get_dependencies():
+ if isinstance(d, build.StaticLibrary):
+ for dep in d.get_external_deps():
+ commands.extend_preserving_lflags(linker.get_dependency_link_args(dep))
+
+ # Add link args specific to this BuildTarget type that must not be overridden by dependencies
+ commands += self.get_target_type_link_args_post_dependencies(target, linker)
+
+ # Add link args for c_* or cpp_* build options. Currently this only
+ # adds c_winlibs and cpp_winlibs when building for Windows. This needs
+ # to be after all internal and external libraries so that unresolved
+ # symbols from those can be found here. This is needed when the
+ # *_winlibs that we want to link to are static mingw64 libraries.
+ if isinstance(linker, Compiler):
+ # The static linker doesn't know what language it is building, so we
+ # don't know what option. Fortunately, it doesn't care to see the
+ # language-specific options either.
+ #
+ # We shouldn't check whether we are making a static library, because
+ # in the LTO case we do use a real compiler here.
+ commands += linker.get_option_link_args(self.environment.coredata.options)
+
+ dep_targets = []
+ dep_targets.extend(self.guess_external_link_dependencies(linker, target, commands, internal))
+
+ # Add libraries generated by custom targets
+ custom_target_libraries = self.get_custom_target_provided_libraries(target)
+ commands += extra_args
+ commands += custom_target_libraries
+ commands += stdlib_args # Standard library arguments go last, because they never depend on anything.
+ dep_targets.extend([self.get_dependency_filename(t) for t in dependencies])
+ dep_targets.extend([self.get_dependency_filename(t)
+ for t in target.link_depends])
+ elem = NinjaBuildElement(self.all_outputs, outname, linker_rule, obj_list, implicit_outs=implicit_outs)
+ elem.add_dep(dep_targets + custom_target_libraries)
+ elem.add_item('LINK_ARGS', commands)
+ return elem
+
+ def get_dependency_filename(self, t):
+ if isinstance(t, build.SharedLibrary):
+ return self.get_target_shsym_filename(t)
+ elif isinstance(t, mesonlib.File):
+ if t.is_built:
+ return t.relative_name()
+ else:
+ return t.absolute_path(self.environment.get_source_dir(),
+ self.environment.get_build_dir())
+ return self.get_target_filename(t)
+
+ def generate_shlib_aliases(self, target, outdir):
+ aliases = target.get_aliases()
+ for alias, to in aliases.items():
+ aliasfile = os.path.join(self.environment.get_build_dir(), outdir, alias)
+ try:
+ os.remove(aliasfile)
+ except Exception:
+ pass
+ try:
+ os.symlink(to, aliasfile)
+ except NotImplementedError:
+ mlog.debug("Library versioning disabled because symlinks are not supported.")
+ except OSError:
+ mlog.debug("Library versioning disabled because we do not have symlink creation privileges.")
+
+ def generate_custom_target_clean(self, trees):
+ e = NinjaBuildElement(self.all_outputs, 'meson-clean-ctlist', 'CUSTOM_COMMAND', 'PHONY')
+ d = CleanTrees(self.environment.get_build_dir(), trees)
+ d_file = os.path.join(self.environment.get_scratch_dir(), 'cleantrees.dat')
+ e.add_item('COMMAND', self.environment.get_build_command() + ['--internal', 'cleantrees', d_file])
+ e.add_item('description', 'Cleaning custom target directories')
+ self.add_build(e)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-clean-ctlist')
+ # Write out the data file passed to the script
+ with open(d_file, 'wb') as ofile:
+ pickle.dump(d, ofile)
+ return 'clean-ctlist'
+
+ def generate_gcov_clean(self):
+ gcno_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcno', 'CUSTOM_COMMAND', 'PHONY')
+ gcno_elem.add_item('COMMAND', mesonlib.get_meson_command() + ['--internal', 'delwithsuffix', '.', 'gcno'])
+ gcno_elem.add_item('description', 'Deleting gcno files')
+ self.add_build(gcno_elem)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-clean-gcno')
+
+ gcda_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcda', 'CUSTOM_COMMAND', 'PHONY')
+ gcda_elem.add_item('COMMAND', mesonlib.get_meson_command() + ['--internal', 'delwithsuffix', '.', 'gcda'])
+ gcda_elem.add_item('description', 'Deleting gcda files')
+ self.add_build(gcda_elem)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-clean-gcda')
+
+ def get_user_option_args(self):
+ cmds = []
+ for (k, v) in self.environment.coredata.options.items():
+ if k.is_project():
+ cmds.append('-D' + str(k) + '=' + (v.value if isinstance(v.value, str) else str(v.value).lower()))
+ # The order of these arguments must be the same between runs of Meson
+ # to ensure reproducible output. The order we pass them shouldn't
+ # affect behavior in any other way.
+ return sorted(cmds)
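+
+    # Example with a hypothetical project option: a boolean option 'use_foo'
+    # set to True is emitted as '-Duse_foo=true' (note the lowercasing).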
+
+ def generate_dist(self):
+ elem = NinjaBuildElement(self.all_outputs, 'meson-dist', 'CUSTOM_COMMAND', 'PHONY')
+ elem.add_item('DESC', 'Creating source packages')
+ elem.add_item('COMMAND', self.environment.get_build_command() + ['dist'])
+ elem.add_item('pool', 'console')
+ self.add_build(elem)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-dist')
+
+ def generate_scanbuild(self):
+ if not environment.detect_scanbuild():
+ return
+ if ('', 'scan-build') in self.build.run_target_names:
+ return
+ cmd = self.environment.get_build_command() + \
+ ['--internal', 'scanbuild', self.environment.source_dir, self.environment.build_dir] + \
+ self.environment.get_build_command() + self.get_user_option_args()
+ elem = NinjaBuildElement(self.all_outputs, 'meson-scan-build', 'CUSTOM_COMMAND', 'PHONY')
+ elem.add_item('COMMAND', cmd)
+ elem.add_item('pool', 'console')
+ self.add_build(elem)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-scan-build')
+
+ def generate_clangtool(self, name, extra_arg=None):
+ target_name = 'clang-' + name
+ extra_args = []
+ if extra_arg:
+ target_name += f'-{extra_arg}'
+ extra_args.append(f'--{extra_arg}')
+ if not os.path.exists(os.path.join(self.environment.source_dir, '.clang-' + name)) and \
+ not os.path.exists(os.path.join(self.environment.source_dir, '_clang-' + name)):
+ return
+ if target_name in self.all_outputs:
+ return
+ if ('', target_name) in self.build.run_target_names:
+ return
+ cmd = self.environment.get_build_command() + \
+ ['--internal', 'clang' + name, self.environment.source_dir, self.environment.build_dir] + \
+ extra_args
+ elem = NinjaBuildElement(self.all_outputs, 'meson-' + target_name, 'CUSTOM_COMMAND', 'PHONY')
+ elem.add_item('COMMAND', cmd)
+ elem.add_item('pool', 'console')
+ self.add_build(elem)
+ self.create_target_alias('meson-' + target_name)
+
+ def generate_clangformat(self):
+ if not environment.detect_clangformat():
+ return
+ self.generate_clangtool('format')
+ self.generate_clangtool('format', 'check')
+
+ def generate_clangtidy(self):
+ import shutil
+ if not shutil.which('clang-tidy'):
+ return
+ self.generate_clangtool('tidy')
+
+ def generate_tags(self, tool, target_name):
+ import shutil
+ if not shutil.which(tool):
+ return
+ if ('', target_name) in self.build.run_target_names:
+ return
+ if target_name in self.all_outputs:
+ return
+ cmd = self.environment.get_build_command() + \
+ ['--internal', 'tags', tool, self.environment.source_dir]
+ elem = NinjaBuildElement(self.all_outputs, 'meson-' + target_name, 'CUSTOM_COMMAND', 'PHONY')
+ elem.add_item('COMMAND', cmd)
+ elem.add_item('pool', 'console')
+ self.add_build(elem)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-' + target_name)
+
+ # For things like scan-build and other helper tools we might have.
+ def generate_utils(self):
+ self.generate_scanbuild()
+ self.generate_clangformat()
+ self.generate_clangtidy()
+ self.generate_tags('etags', 'TAGS')
+ self.generate_tags('ctags', 'ctags')
+ self.generate_tags('cscope', 'cscope')
+ cmd = self.environment.get_build_command() + ['--internal', 'uninstall']
+ elem = NinjaBuildElement(self.all_outputs, 'meson-uninstall', 'CUSTOM_COMMAND', 'PHONY')
+ elem.add_item('COMMAND', cmd)
+ elem.add_item('pool', 'console')
+ self.add_build(elem)
+ # Alias that runs the target defined above
+ self.create_target_alias('meson-uninstall')
+
+ def generate_ending(self):
+ targetlist = []
+ for t in self.get_build_by_default_targets().values():
+ # Add the first output of each target to the 'all' target so that
+ # they are all built
+ targetlist.append(os.path.join(self.get_target_dir(t), t.get_outputs()[0]))
+
+ elem = NinjaBuildElement(self.all_outputs, 'all', 'phony', targetlist)
+ self.add_build(elem)
+
+ elem = NinjaBuildElement(self.all_outputs, 'meson-clean', 'CUSTOM_COMMAND', 'PHONY')
+ elem.add_item('COMMAND', self.ninja_command + ['-t', 'clean'])
+ elem.add_item('description', 'Cleaning')
+ # Alias that runs the above-defined meson-clean target
+ self.create_target_alias('meson-clean')
+
+ # If we have custom targets in this project, add all their outputs to
+ # the list that is passed to the `cleantrees.py` script. The script
+ # will manually delete all custom_target outputs that are directories
+ # instead of files. This is needed because on platforms other than
+ # Windows, Ninja only deletes directories while cleaning if they are
+ # empty. https://github.com/mesonbuild/meson/issues/1220
+ ctlist = []
+ for t in self.build.get_targets().values():
+ if isinstance(t, build.CustomTarget):
+ # Create a list of all custom target outputs
+ for o in t.get_outputs():
+ ctlist.append(os.path.join(self.get_target_dir(t), o))
+ if ctlist:
+ elem.add_dep(self.generate_custom_target_clean(ctlist))
+
+ if OptionKey('b_coverage') in self.environment.coredata.options and \
+ self.environment.coredata.options[OptionKey('b_coverage')].value:
+ self.generate_gcov_clean()
+ elem.add_dep('clean-gcda')
+ elem.add_dep('clean-gcno')
+ self.add_build(elem)
+
+ deps = self.get_regen_filelist()
+ elem = NinjaBuildElement(self.all_outputs, 'build.ninja', 'REGENERATE_BUILD', deps)
+ elem.add_item('pool', 'console')
+ self.add_build(elem)
+
+ elem = NinjaBuildElement(self.all_outputs, 'reconfigure', 'REGENERATE_BUILD', 'PHONY')
+ elem.add_item('pool', 'console')
+ self.add_build(elem)
+
+ elem = NinjaBuildElement(self.all_outputs, deps, 'phony', '')
+ self.add_build(elem)
+
+ def get_introspection_data(self, target_id: str, target: build.Target) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
+ if target_id not in self.introspection_data or len(self.introspection_data[target_id]) == 0:
+ return super().get_introspection_data(target_id, target)
+
+        return list(self.introspection_data[target_id].values())
+
+
+def _scan_fortran_file_deps(src: Path, srcdir: Path, dirname: Path, tdeps, compiler) -> T.List[str]:
+ """
+    Scan a Fortran file for dependencies. Needs to be distinct from target
+    to allow for recursion induced by `include` statements.
+
+ It makes a number of assumptions, including
+
+ * `use`, `module`, `submodule` name is not on a continuation line
+
+ Regex
+ -----
+
+ * `incre` works for `#include "foo.f90"` and `include "foo.f90"`
+ * `usere` works for legacy and Fortran 2003 `use` statements
+ * `submodre` is for Fortran >= 2008 `submodule`
+ """
+
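+    # Illustrative lines each regex is meant to catch:
+    #   include "constants.f90"            -> incre
+    #   use mpi                            -> usere
+    #   submodule (ancestor:parent) child  -> submodre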
+ incre = re.compile(FORTRAN_INCLUDE_PAT, re.IGNORECASE)
+ usere = re.compile(FORTRAN_USE_PAT, re.IGNORECASE)
+ submodre = re.compile(FORTRAN_SUBMOD_PAT, re.IGNORECASE)
+
+ mod_files = []
+ src = Path(src)
+ with src.open(encoding='ascii', errors='ignore') as f:
+ for line in f:
+ # included files
+ incmatch = incre.match(line)
+ if incmatch is not None:
+ incfile = src.parent / incmatch.group(1)
+ # NOTE: src.parent is most general, in particular for CMake subproject with Fortran file
+ # having an `include 'foo.f'` statement.
+ if incfile.suffix.lower()[1:] in compiler.file_suffixes:
+ mod_files.extend(_scan_fortran_file_deps(incfile, srcdir, dirname, tdeps, compiler))
+ # modules
+ usematch = usere.match(line)
+ if usematch is not None:
+ usename = usematch.group(1).lower()
+ if usename == 'intrinsic': # this keeps the regex simpler
+ continue
+ if usename not in tdeps:
+ # The module is not provided by any source file. This
+ # is due to:
+ # a) missing file/typo/etc
+ # b) using a module provided by the compiler, such as
+ # OpenMP
+ # There's no easy way to tell which is which (that I
+ # know of) so just ignore this and go on. Ideally we
+ # would print a warning message to the user but this is
+ # a common occurrence, which would lead to lots of
+ # distracting noise.
+ continue
+ srcfile = srcdir / tdeps[usename].fname # type: Path
+ if not srcfile.is_file():
+ if srcfile.name != src.name: # generated source file
+ pass
+ else: # subproject
+ continue
+ elif srcfile.samefile(src): # self-reference
+ continue
+
+ mod_name = compiler.module_name_to_filename(usename)
+ mod_files.append(str(dirname / mod_name))
+ else: # submodules
+ submodmatch = submodre.match(line)
+ if submodmatch is not None:
+ parents = submodmatch.group(1).lower().split(':')
+ assert len(parents) in (1, 2), (
+ 'submodule ancestry must be specified as'
+ f' ancestor:parent but Meson found {parents}')
+
+ ancestor_child = '_'.join(parents)
+ if ancestor_child not in tdeps:
+ raise MesonException("submodule {} relies on ancestor module {} that was not found.".format(submodmatch.group(2).lower(), ancestor_child.split('_')[0]))
+ submodsrcfile = srcdir / tdeps[ancestor_child].fname # type: Path
+ if not submodsrcfile.is_file():
+ if submodsrcfile.name != src.name: # generated source file
+ pass
+ else: # subproject
+ continue
+ elif submodsrcfile.samefile(src): # self-reference
+ continue
+ mod_name = compiler.module_name_to_filename(ancestor_child)
+ mod_files.append(str(dirname / mod_name))
+ return mod_files
diff --git a/meson/mesonbuild/backend/vs2010backend.py b/meson/mesonbuild/backend/vs2010backend.py
new file mode 100644
index 000000000..6e6e47fdb
--- /dev/null
+++ b/meson/mesonbuild/backend/vs2010backend.py
@@ -0,0 +1,1562 @@
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import os
+import xml.dom.minidom
+import xml.etree.ElementTree as ET
+import uuid
+import typing as T
+from pathlib import Path, PurePath
+
+from . import backends
+from .. import build
+from .. import dependencies
+from .. import mlog
+from .. import compilers
+from ..interpreter import Interpreter
+from ..mesonlib import (
+ File, MesonException, python_command, replace_if_different, OptionKey, version_compare,
+)
+from ..environment import Environment, build_filename
+
+def autodetect_vs_version(build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]):
+ vs_version = os.getenv('VisualStudioVersion', None)
+ vs_install_dir = os.getenv('VSINSTALLDIR', None)
+ if not vs_install_dir:
+ raise MesonException('Could not detect Visual Studio: Environment variable VSINSTALLDIR is not set!\n'
+ 'Are you running meson from the Visual Studio Developer Command Prompt?')
+ # VisualStudioVersion is set since Visual Studio 11.0, but sometimes
+ # vcvarsall.bat doesn't set it, so also use VSINSTALLDIR
+ if vs_version == '11.0' or 'Visual Studio 11' in vs_install_dir:
+ from mesonbuild.backend.vs2012backend import Vs2012Backend
+ return Vs2012Backend(build, interpreter)
+ if vs_version == '12.0' or 'Visual Studio 12' in vs_install_dir:
+ from mesonbuild.backend.vs2013backend import Vs2013Backend
+ return Vs2013Backend(build, interpreter)
+ if vs_version == '14.0' or 'Visual Studio 14' in vs_install_dir:
+ from mesonbuild.backend.vs2015backend import Vs2015Backend
+ return Vs2015Backend(build, interpreter)
+ if vs_version == '15.0' or 'Visual Studio 17' in vs_install_dir or \
+ 'Visual Studio\\2017' in vs_install_dir:
+ from mesonbuild.backend.vs2017backend import Vs2017Backend
+ return Vs2017Backend(build, interpreter)
+ if vs_version == '16.0' or 'Visual Studio 19' in vs_install_dir or \
+ 'Visual Studio\\2019' in vs_install_dir:
+ from mesonbuild.backend.vs2019backend import Vs2019Backend
+ return Vs2019Backend(build, interpreter)
+ if 'Visual Studio 10.0' in vs_install_dir:
+ return Vs2010Backend(build, interpreter)
+ raise MesonException('Could not detect Visual Studio using VisualStudioVersion: {!r} or VSINSTALLDIR: {!r}!\n'
+ 'Please specify the exact backend to use.'.format(vs_version, vs_install_dir))
+
+def split_o_flags_args(args):
+ """
+ Splits any /O args and returns them. Does not take care of flags overriding
+ previous ones. Skips non-O flag arguments.
+
+ ['/Ox', '/Ob1'] returns ['/Ox', '/Ob1']
+ ['/Oxj', '/MP'] returns ['/Ox', '/Oj']
+ """
+ o_flags = []
+ for arg in args:
+ if not arg.startswith('/O'):
+ continue
+ flags = list(arg[2:])
+ # Assume that this one can't be clumped with the others since it takes
+ # an argument itself
+ if 'b' in flags:
+ o_flags.append(arg)
+ else:
+ o_flags += ['/O' + f for f in flags]
+ return o_flags
+
+def generate_guid_from_path(path, path_type):
+ return str(uuid.uuid5(uuid.NAMESPACE_URL, 'meson-vs-' + path_type + ':' + str(path))).upper()
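+
+# Illustrative usage (hypothetical path): generate_guid_from_path('sub/proj', 'project')
+# returns the same uppercase UUID5 string on every run for the same inputs, so
+# regenerated solutions keep stable project GUIDs.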
+
+class Vs2010Backend(backends.Backend):
+ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]):
+ super().__init__(build, interpreter)
+ self.name = 'vs2010'
+ self.project_file_version = '10.0.30319.1'
+ self.platform_toolset = None
+ self.vs_version = '2010'
+ self.windows_target_platform_version = None
+ self.subdirs = {}
+ self.handled_target_deps = {}
+
+ def get_target_private_dir(self, target):
+ return os.path.join(self.get_target_dir(target), target.get_id())
+
+ def generate_custom_generator_commands(self, target, parent_node):
+ generator_output_files = []
+ custom_target_include_dirs = []
+ custom_target_output_files = []
+ target_private_dir = self.relpath(self.get_target_private_dir(target), self.get_target_dir(target))
+ down = self.target_to_build_root(target)
+ for genlist in target.get_generated_sources():
+ if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)):
+ for i in genlist.get_outputs():
+ # Path to the generated source from the current vcxproj dir via the build root
+ ipath = os.path.join(down, self.get_target_dir(genlist), i)
+ custom_target_output_files.append(ipath)
+ idir = self.relpath(self.get_target_dir(genlist), self.get_target_dir(target))
+ if idir not in custom_target_include_dirs:
+ custom_target_include_dirs.append(idir)
+ else:
+ generator = genlist.get_generator()
+ exe = generator.get_exe()
+ infilelist = genlist.get_inputs()
+ outfilelist = genlist.get_outputs()
+ source_dir = os.path.join(down, self.build_to_src, genlist.subdir)
+ exe_arr = self.build_target_to_cmd_array(exe)
+ idgroup = ET.SubElement(parent_node, 'ItemGroup')
+ for i in range(len(infilelist)):
+ if len(infilelist) == len(outfilelist):
+ sole_output = os.path.join(target_private_dir, outfilelist[i])
+ else:
+ sole_output = ''
+ curfile = infilelist[i]
+ infilename = os.path.join(down, curfile.rel_to_builddir(self.build_to_src))
+ deps = self.get_custom_target_depend_files(genlist, True)
+ base_args = generator.get_arglist(infilename)
+ outfiles_rel = genlist.get_outputs_for(curfile)
+ outfiles = [os.path.join(target_private_dir, of) for of in outfiles_rel]
+ generator_output_files += outfiles
+ args = [x.replace("@INPUT@", infilename).replace('@OUTPUT@', sole_output)
+ for x in base_args]
+ args = self.replace_outputs(args, target_private_dir, outfiles_rel)
+ args = [x.replace("@SOURCE_DIR@", self.environment.get_source_dir())
+ .replace("@BUILD_DIR@", target_private_dir)
+ for x in args]
+ args = [x.replace("@CURRENT_SOURCE_DIR@", source_dir) for x in args]
+ args = [x.replace("@SOURCE_ROOT@", self.environment.get_source_dir())
+ .replace("@BUILD_ROOT@", self.environment.get_build_dir())
+ for x in args]
+ args = [x.replace('\\', '/') for x in args]
+ cmd = exe_arr + self.replace_extra_args(args, genlist)
+ # Always use a wrapper because MSBuild eats random characters when
+ # there are many arguments.
+ tdir_abs = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
+ cmd, _ = self.as_meson_exe_cmdline(
+ 'generator ' + cmd[0],
+ cmd[0],
+ cmd[1:],
+ workdir=tdir_abs,
+ capture=outfiles[0] if generator.capture else None,
+ force_serialize=True
+ )
+ deps = cmd[-1:] + deps
+ abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
+ os.makedirs(abs_pdir, exist_ok=True)
+ cbs = ET.SubElement(idgroup, 'CustomBuild', Include=infilename)
+ ET.SubElement(cbs, 'Command').text = ' '.join(self.quote_arguments(cmd))
+ ET.SubElement(cbs, 'Outputs').text = ';'.join(outfiles)
+ ET.SubElement(cbs, 'AdditionalInputs').text = ';'.join(deps)
+ return generator_output_files, custom_target_output_files, custom_target_include_dirs
+
+ def generate(self):
+ target_machine = self.interpreter.builtin['target_machine'].cpu_family_method(None, None)
+ if target_machine == '64' or target_machine == 'x86_64':
+ # amd64 or x86_64
+ self.platform = 'x64'
+ elif target_machine == 'x86':
+ # x86
+ self.platform = 'Win32'
+ elif target_machine == 'aarch64' or target_machine == 'arm64':
+ target_cpu = self.interpreter.builtin['target_machine'].cpu_method(None, None)
+ if target_cpu == 'arm64ec':
+ self.platform = 'arm64ec'
+ else:
+ self.platform = 'arm64'
+ elif 'arm' in target_machine.lower():
+ self.platform = 'ARM'
+ else:
+ raise MesonException('Unsupported Visual Studio platform: ' + target_machine)
+ self.buildtype = self.environment.coredata.get_option(OptionKey('buildtype'))
+ self.optimization = self.environment.coredata.get_option(OptionKey('optimization'))
+ self.debug = self.environment.coredata.get_option(OptionKey('debug'))
+ try:
+ self.sanitize = self.environment.coredata.get_option(OptionKey('b_sanitize'))
+ except MesonException:
+ self.sanitize = 'none'
+ sln_filename = os.path.join(self.environment.get_build_dir(), self.build.project_name + '.sln')
+ projlist = self.generate_projects()
+ self.gen_testproj('RUN_TESTS', os.path.join(self.environment.get_build_dir(), 'RUN_TESTS.vcxproj'))
+ self.gen_installproj('RUN_INSTALL', os.path.join(self.environment.get_build_dir(), 'RUN_INSTALL.vcxproj'))
+ self.gen_regenproj('REGEN', os.path.join(self.environment.get_build_dir(), 'REGEN.vcxproj'))
+ self.generate_solution(sln_filename, projlist)
+ self.generate_regen_info()
+ Vs2010Backend.touch_regen_timestamp(self.environment.get_build_dir())
+
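+ # The regen stamp file is touched after every successful (re)generation;
+ # the REGEN utility project lists it as its output, letting MSBuild decide
+ # whether the solution must be regenerated.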
+ @staticmethod
+ def get_regen_stampfile(build_dir: str) -> str:
+ return os.path.join(os.path.join(build_dir, Environment.private_dir), 'regen.stamp')
+
+ @staticmethod
+ def touch_regen_timestamp(build_dir: str) -> None:
+ with open(Vs2010Backend.get_regen_stampfile(build_dir), 'w', encoding='utf-8'):
+ pass
+
+ def get_vcvars_command(self):
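+ # Returns a quoted command prefix that initializes the VS environment,
+ # e.g. (illustrative) '"C:\\...\\vcvarsall.bat" x64' or
+ # '"C:\\...\\VsDevCmd.bat" -arch=x64 -host_arch=x64'; an empty string
+ # means no suitable script was found.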
+ has_arch_values = 'VSCMD_ARG_TGT_ARCH' in os.environ and 'VSCMD_ARG_HOST_ARCH' in os.environ
+
+ # Use vcvarsall.bat if we found it.
+ if 'VCINSTALLDIR' in os.environ:
+ vs_version = os.environ['VisualStudioVersion'] \
+ if 'VisualStudioVersion' in os.environ else None
+ relative_path = 'Auxiliary\\Build\\' if vs_version is not None and vs_version >= '15.0' else ''
+ script_path = os.environ['VCINSTALLDIR'] + relative_path + 'vcvarsall.bat'
+ if os.path.exists(script_path):
+ if has_arch_values:
+ target_arch = os.environ['VSCMD_ARG_TGT_ARCH']
+ host_arch = os.environ['VSCMD_ARG_HOST_ARCH']
+ else:
+ target_arch = os.environ.get('Platform', 'x86')
+ host_arch = target_arch
+ arch = host_arch + '_' + target_arch if host_arch != target_arch else target_arch
+ return f'"{script_path}" {arch}'
+
+ # Otherwise try the VS2017 Developer Command Prompt.
+ if 'VS150COMNTOOLS' in os.environ and has_arch_values:
+ script_path = os.environ['VS150COMNTOOLS'] + 'VsDevCmd.bat'
+ if os.path.exists(script_path):
+ return '"%s" -arch=%s -host_arch=%s' % \
+ (script_path, os.environ['VSCMD_ARG_TGT_ARCH'], os.environ['VSCMD_ARG_HOST_ARCH'])
+ return ''
+
+ def get_obj_target_deps(self, obj_list):
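+ # Collect the targets whose extracted objects we consume, keyed by
+ # target id so duplicates collapse to a single dependency.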
+ result = {}
+ for o in obj_list:
+ if isinstance(o, build.ExtractedObjects):
+ result[o.target.get_id()] = o.target
+ return result.items()
+
+ def get_target_deps(self, t, recursive=False):
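+ # Returns a dict mapping target id -> target for everything the targets
+ # in `t` depend on; with recursive=True this is the transitive closure.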
+ all_deps = {}
+ for target in t.values():
+ if isinstance(target, build.CustomTarget):
+ for d in target.get_target_dependencies():
+ all_deps[d.get_id()] = d
+ elif isinstance(target, build.RunTarget):
+ for d in target.get_dependencies():
+ all_deps[d.get_id()] = d
+ elif isinstance(target, build.BuildTarget):
+ for ldep in target.link_targets:
+ if isinstance(ldep, build.CustomTargetIndex):
+ all_deps[ldep.get_id()] = ldep.target
+ else:
+ all_deps[ldep.get_id()] = ldep
+ for ldep in target.link_whole_targets:
+ if isinstance(ldep, build.CustomTargetIndex):
+ all_deps[ldep.get_id()] = ldep.target
+ else:
+ all_deps[ldep.get_id()] = ldep
+ for obj_id, objdep in self.get_obj_target_deps(target.objects):
+ all_deps[obj_id] = objdep
+ else:
+ raise MesonException('Unknown target type for target %s' % target)
+
+ for gendep in target.get_generated_sources():
+ if isinstance(gendep, build.CustomTarget):
+ all_deps[gendep.get_id()] = gendep
+ elif isinstance(gendep, build.CustomTargetIndex):
+ all_deps[gendep.target.get_id()] = gendep.target
+ else:
+ generator = gendep.get_generator()
+ gen_exe = generator.get_exe()
+ if isinstance(gen_exe, build.Executable):
+ all_deps[gen_exe.get_id()] = gen_exe
+ for d in generator.depends:
+ if isinstance(d, build.CustomTargetIndex):
+ all_deps[d.get_id()] = d.target
+ else:
+ all_deps[d.get_id()] = d
+
+ if not t or not recursive:
+ return all_deps
+ ret = self.get_target_deps(all_deps, recursive)
+ ret.update(all_deps)
+ return ret
+
+ def generate_solution_dirs(self, ofile, parents):
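+ # With layout=mirror, emit one solution-folder Project entry per source
+ # subdirectory and record (guid, parent_guid) pairs in self.subdirs so
+ # the folders can be nested in the NestedProjects section later.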
+ prj_templ = 'Project("{%s}") = "%s", "%s", "{%s}"\n'
+ iterpaths = reversed(parents)
+ # Skip the first path, which is always '.'
+ next(iterpaths)
+ for path in iterpaths:
+ if path not in self.subdirs:
+ basename = path.name
+ identifier = generate_guid_from_path(path, 'subdir')
+ # top-level directories have '.' as their parent and get a None parent identifier
+ parent_dir = path.parent
+ parent_identifier = self.subdirs[parent_dir][0] \
+ if parent_dir != PurePath('.') else None
+ self.subdirs[path] = (identifier, parent_identifier)
+ prj_line = prj_templ % (
+ self.environment.coredata.lang_guids['directory'],
+ basename, basename, self.subdirs[path][0])
+ ofile.write(prj_line)
+ ofile.write('EndProject\n')
+
+ def generate_solution(self, sln_filename, projlist):
+ default_projlist = self.get_build_by_default_targets()
+ sln_filename_tmp = sln_filename + '~'
+ with open(sln_filename_tmp, 'w', encoding='utf-8') as ofile:
+ ofile.write('Microsoft Visual Studio Solution File, Format '
+ 'Version 11.00\n')
+ ofile.write('# Visual Studio ' + self.vs_version + '\n')
+ prj_templ = 'Project("{%s}") = "%s", "%s", "{%s}"\n'
+ for prj in projlist:
+ coredata = self.environment.coredata
+ if coredata.get_option(OptionKey('layout')) == 'mirror':
+ self.generate_solution_dirs(ofile, prj[1].parents)
+ target = self.build.targets[prj[0]]
+ lang = 'default'
+ if hasattr(target, 'compilers') and target.compilers:
+ for lang_out in target.compilers.keys():
+ lang = lang_out
+ break
+ prj_line = prj_templ % (
+ self.environment.coredata.lang_guids[lang],
+ prj[0], prj[1], prj[2])
+ ofile.write(prj_line)
+ target_dict = {target.get_id(): target}
+ # Get recursive deps
+ recursive_deps = self.get_target_deps(
+ target_dict, recursive=True)
+ ofile.write('EndProject\n')
+ for dep, target in recursive_deps.items():
+ if prj[0] in default_projlist:
+ default_projlist[dep] = target
+
+ test_line = prj_templ % (self.environment.coredata.lang_guids['default'],
+ 'RUN_TESTS', 'RUN_TESTS.vcxproj',
+ self.environment.coredata.test_guid)
+ ofile.write(test_line)
+ ofile.write('EndProject\n')
+ regen_line = prj_templ % (self.environment.coredata.lang_guids['default'],
+ 'REGEN', 'REGEN.vcxproj',
+ self.environment.coredata.regen_guid)
+ ofile.write(regen_line)
+ ofile.write('EndProject\n')
+ install_line = prj_templ % (self.environment.coredata.lang_guids['default'],
+ 'RUN_INSTALL', 'RUN_INSTALL.vcxproj',
+ self.environment.coredata.install_guid)
+ ofile.write(install_line)
+ ofile.write('EndProject\n')
+ ofile.write('Global\n')
+ ofile.write('\tGlobalSection(SolutionConfigurationPlatforms) = '
+ 'preSolution\n')
+ ofile.write('\t\t%s|%s = %s|%s\n' %
+ (self.buildtype, self.platform, self.buildtype,
+ self.platform))
+ ofile.write('\tEndGlobalSection\n')
+ ofile.write('\tGlobalSection(ProjectConfigurationPlatforms) = '
+ 'postSolution\n')
+ ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
+ (self.environment.coredata.regen_guid, self.buildtype,
+ self.platform, self.buildtype, self.platform))
+ ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
+ (self.environment.coredata.regen_guid, self.buildtype,
+ self.platform, self.buildtype, self.platform))
+ # Create the solution configuration
+ for p in projlist:
+ # Add to the list of projects in this solution
+ ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
+ (p[2], self.buildtype, self.platform,
+ self.buildtype, self.platform))
+ if p[0] in default_projlist and \
+ not isinstance(self.build.targets[p[0]], build.RunTarget):
+ # Add to the list of projects to be built
+ ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
+ (p[2], self.buildtype, self.platform,
+ self.buildtype, self.platform))
+ ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
+ (self.environment.coredata.test_guid, self.buildtype,
+ self.platform, self.buildtype, self.platform))
+ ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
+ (self.environment.coredata.install_guid, self.buildtype,
+ self.platform, self.buildtype, self.platform))
+ ofile.write('\tEndGlobalSection\n')
+ ofile.write('\tGlobalSection(SolutionProperties) = preSolution\n')
+ ofile.write('\t\tHideSolutionNode = FALSE\n')
+ ofile.write('\tEndGlobalSection\n')
+ if self.subdirs:
+ ofile.write('\tGlobalSection(NestedProjects) = '
+ 'preSolution\n')
+ for p in projlist:
+ if p[1].parent != PurePath('.'):
+ ofile.write("\t\t{{{}}} = {{{}}}\n".format(p[2], self.subdirs[p[1].parent][0]))
+ for subdir in self.subdirs.values():
+ if subdir[1]:
+ ofile.write("\t\t{{{}}} = {{{}}}\n".format(subdir[0], subdir[1]))
+ ofile.write('\tEndGlobalSection\n')
+ ofile.write('EndGlobal\n')
+ replace_if_different(sln_filename, sln_filename_tmp)
+
+ def generate_projects(self):
+ startup_project = self.environment.coredata.options[OptionKey('backend_startup_project')].value
+ projlist = []
+ startup_idx = 0
+ for (i, (name, target)) in enumerate(self.build.targets.items()):
+ if startup_project and startup_project == target.get_basename():
+ startup_idx = i
+ outdir = Path(
+ self.environment.get_build_dir(),
+ self.get_target_dir(target)
+ )
+ outdir.mkdir(exist_ok=True, parents=True)
+ fname = name + '.vcxproj'
+ target_dir = PurePath(self.get_target_dir(target))
+ relname = target_dir / fname
+ projfile_path = outdir / fname
+ proj_uuid = self.environment.coredata.target_guids[name]
+ self.gen_vcxproj(target, str(projfile_path), proj_uuid)
+ projlist.append((name, relname, proj_uuid))
+
+ # Put the startup project first in the project list
+ if startup_idx:
+ projlist = [projlist[startup_idx]] + projlist[0:startup_idx] + projlist[startup_idx + 1:]
+
+ return projlist
+
+ def split_sources(self, srclist):
+ sources = []
+ headers = []
+ objects = []
+ languages = []
+ for i in srclist:
+ if self.environment.is_header(i):
+ headers.append(i)
+ elif self.environment.is_object(i):
+ objects.append(i)
+ elif self.environment.is_source(i):
+ sources.append(i)
+ lang = self.lang_from_source_file(i)
+ if lang not in languages:
+ languages.append(lang)
+ elif self.environment.is_library(i):
+ pass
+ else:
+ # Everything that is not an object or source file is considered a header.
+ headers.append(i)
+ return sources, headers, objects, languages
+
+ def target_to_build_root(self, target):
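+ # Relative prefix from the vcxproj dir back to the build root, e.g. a
+ # target dir of 'sub/dir' yields '../..' (with the platform separator).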
+ if self.get_target_dir(target) == '':
+ return ''
+
+ directories = os.path.normpath(self.get_target_dir(target)).split(os.sep)
+ return os.sep.join(['..'] * len(directories))
+
+ def quote_arguments(self, arr):
+ return ['"%s"' % i for i in arr]
+
+ def add_project_reference(self, root, include, projid, link_outputs=False):
+ ig = ET.SubElement(root, 'ItemGroup')
+ pref = ET.SubElement(ig, 'ProjectReference', Include=include)
+ ET.SubElement(pref, 'Project').text = '{%s}' % projid
+ if not link_outputs:
+ # Do not link in generated .lib files from dependencies automatically.
+ # We only use the dependencies for ordering and link in the generated
+ # objects and .lib files manually.
+ ET.SubElement(pref, 'LinkLibraryDependencies').text = 'false'
+
+ def add_target_deps(self, root, target):
+ target_dict = {target.get_id(): target}
+ for dep in self.get_target_deps(target_dict).values():
+ if dep.get_id() in self.handled_target_deps[target.get_id()]:
+ # This dependency was already handled manually.
+ continue
+ relpath = self.get_target_dir_relative_to(dep, target)
+ vcxproj = os.path.join(relpath, dep.get_id() + '.vcxproj')
+ tid = self.environment.coredata.target_guids[dep.get_id()]
+ self.add_project_reference(root, vcxproj, tid)
+
+ def create_basic_crap(self, target, guid):
+ project_name = target.name
+ root = ET.Element('Project', {'DefaultTargets': "Build",
+ 'ToolsVersion': '4.0',
+ 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
+ confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'})
+ prjconf = ET.SubElement(confitems, 'ProjectConfiguration',
+ {'Include': self.buildtype + '|' + self.platform})
+ p = ET.SubElement(prjconf, 'Configuration')
+ p.text = self.buildtype
+ pl = ET.SubElement(prjconf, 'Platform')
+ pl.text = self.platform
+ globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
+ guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
+ guidelem.text = '{%s}' % guid
+ kw = ET.SubElement(globalgroup, 'Keyword')
+ kw.text = self.platform + 'Proj'
+ p = ET.SubElement(globalgroup, 'Platform')
+ p.text = self.platform
+ pname = ET.SubElement(globalgroup, 'ProjectName')
+ pname.text = project_name
+ if self.windows_target_platform_version:
+ ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
+ ET.SubElement(type_config, 'ConfigurationType')
+ ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
+ ET.SubElement(type_config, 'UseOfMfc').text = 'false'
+ if self.platform_toolset:
+ ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
+ direlem = ET.SubElement(root, 'PropertyGroup')
+ fver = ET.SubElement(direlem, '_ProjectFileVersion')
+ fver.text = self.project_file_version
+ outdir = ET.SubElement(direlem, 'OutDir')
+ outdir.text = '.\\'
+ intdir = ET.SubElement(direlem, 'IntDir')
+ intdir.text = target.get_id() + '\\'
+ tname = ET.SubElement(direlem, 'TargetName')
+ tname.text = target.name
+ return root
+
+ def gen_run_target_vcxproj(self, target, ofname, guid):
+ root = self.create_basic_crap(target, guid)
+ if not target.command:
+ # FIXME: This is an alias target that doesn't run any command; there
+ # is probably a better way than running this dummy command.
+ cmd_raw = python_command + ['-c', 'exit']
+ else:
+ _, _, cmd_raw = self.eval_custom_target_command(target)
+ depend_files = self.get_custom_target_depend_files(target)
+ target_env = self.get_run_target_env(target)
+ wrapper_cmd, _ = self.as_meson_exe_cmdline(target.name, target.command[0], cmd_raw[1:],
+ force_serialize=True, env=target_env,
+ verbose=True)
+ self.add_custom_build(root, 'run_target', ' '.join(self.quote_arguments(wrapper_cmd)),
+ deps=depend_files)
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ self.add_regen_dependency(root)
+ self.add_target_deps(root, target)
+ self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+
+ def gen_custom_target_vcxproj(self, target, ofname, guid):
+ root = self.create_basic_crap(target, guid)
+ # We need to always use absolute paths because our invocation is always
+ # from the target dir, not the build root.
+ target.absolute_paths = True
+ (srcs, ofilenames, cmd) = self.eval_custom_target_command(target, True)
+ depend_files = self.get_custom_target_depend_files(target, True)
+ # Always use a wrapper because MSBuild eats random characters when
+ # there are many arguments.
+ tdir_abs = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
+ extra_bdeps = target.get_transitive_build_target_deps()
+ wrapper_cmd, _ = self.as_meson_exe_cmdline(target.name, target.command[0], cmd[1:],
+ # All targets run from the target dir
+ workdir=tdir_abs,
+ extra_bdeps=extra_bdeps,
+ capture=ofilenames[0] if target.capture else None,
+ feed=srcs[0] if target.feed else None,
+ force_serialize=True,
+ env=target.env)
+ if target.build_always_stale:
+ # Use a nonexistent file to always consider the target out-of-date.
+ ofilenames += [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(),
+ 'outofdate.file'))]
+ self.add_custom_build(root, 'custom_target', ' '.join(self.quote_arguments(wrapper_cmd)),
+ deps=wrapper_cmd[-1:] + srcs + depend_files, outputs=ofilenames,
+ verify_files=not target.build_always_stale)
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ self.generate_custom_generator_commands(target, root)
+ self.add_regen_dependency(root)
+ self.add_target_deps(root, target)
+ self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+
+ @classmethod
+ def lang_from_source_file(cls, src):
+ ext = src.split('.')[-1]
+ if ext in compilers.c_suffixes:
+ return 'c'
+ if ext in compilers.cpp_suffixes:
+ return 'cpp'
+ raise MesonException('Could not guess language from source file %s.' % src)
+
+ def add_pch(self, pch_sources, lang, inc_cl):
+ if lang in pch_sources:
+ self.use_pch(pch_sources, lang, inc_cl)
+
+ def create_pch(self, pch_sources, lang, inc_cl):
+ pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
+ pch.text = 'Create'
+ self.add_pch_files(pch_sources, lang, inc_cl)
+
+ def use_pch(self, pch_sources, lang, inc_cl):
+ pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
+ pch.text = 'Use'
+ header = self.add_pch_files(pch_sources, lang, inc_cl)
+ pch_include = ET.SubElement(inc_cl, 'ForcedIncludeFiles')
+ pch_include.text = header + ';%(ForcedIncludeFiles)'
+
+ def add_pch_files(self, pch_sources, lang, inc_cl):
+ header = os.path.basename(pch_sources[lang][0])
+ pch_file = ET.SubElement(inc_cl, 'PrecompiledHeaderFile')
+ # When USING PCHs, MSVC will not do the regular include
+ # directory lookup, but simply use a string match to find the
+ # PCH to use. That means the #include directive must match the
+ # pch_file.text used during PCH CREATION verbatim.
+ # When CREATING a PCH, MSVC will do the include directory
+ # lookup to find the actual PCH header to use. Thus, the PCH
+ # header must either be in the include_directories of the target
+ # or be in the same directory as the PCH implementation.
+ pch_file.text = header
+ pch_out = ET.SubElement(inc_cl, 'PrecompiledHeaderOutputFile')
+ pch_out.text = '$(IntDir)$(TargetName)-%s.pch' % lang
+ return header
+
+ def is_argument_with_msbuild_xml_entry(self, entry):
+ # Remove arguments that have a top level XML entry so
+ # they are not used twice.
+ # FIXME add args as needed.
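+ # e.g. '/MDd' is expressed via <RuntimeLibrary> and '/fsanitize=address'
+ # via <EnableASAN>, so passing them through AdditionalOptions as well
+ # would apply them twice.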
+ if entry[1:].startswith('fsanitize'):
+ return True
+ return entry[1:].startswith('M')
+
+ def add_additional_options(self, lang, parent_node, file_args):
+ args = []
+ for arg in file_args[lang].to_native():
+ if self.is_argument_with_msbuild_xml_entry(arg):
+ continue
+ if arg == '%(AdditionalOptions)':
+ args.append(arg)
+ else:
+ args.append(self.escape_additional_option(arg))
+ ET.SubElement(parent_node, "AdditionalOptions").text = ' '.join(args)
+
+ def add_preprocessor_defines(self, lang, parent_node, file_defines):
+ defines = []
+ for define in file_defines[lang]:
+ if define == '%(PreprocessorDefinitions)':
+ defines.append(define)
+ else:
+ defines.append(self.escape_preprocessor_define(define))
+ ET.SubElement(parent_node, "PreprocessorDefinitions").text = ';'.join(defines)
+
+ def add_include_dirs(self, lang, parent_node, file_inc_dirs):
+ dirs = file_inc_dirs[lang]
+ ET.SubElement(parent_node, "AdditionalIncludeDirectories").text = ';'.join(dirs)
+
+ @staticmethod
+ def has_objects(objects, additional_objects, generated_objects):
+ # Ignore generated objects, those are automatically used by MSBuild because they are part of
+ # the CustomBuild Outputs.
+ return len(objects) + len(additional_objects) > 0
+
+ @staticmethod
+ def add_generated_objects(node, generated_objects):
+ # Do not add generated objects to project file. Those are automatically used by MSBuild, because
+ # they are part of the CustomBuild Outputs.
+ return
+
+ @staticmethod
+ def escape_preprocessor_define(define):
+ # See: https://msdn.microsoft.com/en-us/library/bb383819.aspx
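+ # Illustrative: 'A;B' becomes 'A%3BB', and every backslash is doubled.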
+ table = str.maketrans({'%': '%25', '$': '%24', '@': '%40',
+ "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A',
+ # We need to escape backslash because it'll be un-escaped by
+ # Windows during process creation when it parses the arguments
+ # Basically, this converts `\` to `\\`.
+ '\\': '\\\\'})
+ return define.translate(table)
+
+ @staticmethod
+ def escape_additional_option(option):
+ # See: https://msdn.microsoft.com/en-us/library/bb383819.aspx
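+ # Illustrative: '/DFOO=a b' becomes '"/DFOO=a%20b"'; a trailing
+ # backslash is doubled below so it cannot escape the closing quote.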
+ table = str.maketrans({'%': '%25', '$': '%24', '@': '%40',
+ "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A', ' ': '%20'})
+ option = option.translate(table)
+ # Since we're surrounding the option with ", if it ends in \ that will
+ # escape the " when the process arguments are parsed and the starting
+ # " will not terminate. So we escape it if that's the case. I'm not
+ # kidding, this is how escaping works for process args on Windows.
+ if option.endswith('\\'):
+ option += '\\'
+ return f'"{option}"'
+
+ @staticmethod
+ def split_link_args(args):
+ """
+ Split a list of link arguments into three lists:
+ * library search paths
+ * library filenames (or paths)
+ * other link arguments
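+
+ Illustrative: ['/LIBPATH:dir', 'foo.lib', '/DEBUG'] splits into
+ (['dir'], ['foo.lib'], ['/DEBUG']).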
+ """
+ lpaths = []
+ libs = []
+ other = []
+ for arg in args:
+ if arg.startswith('/LIBPATH:'):
+ lpath = arg[9:]
+ # De-dup library search paths by removing older entries when
+ # a new one is found. This is necessary because unlike other
+ # search paths such as the include path, the library is
+ # searched for in the newest (right-most) search path first.
+ if lpath in lpaths:
+ lpaths.remove(lpath)
+ lpaths.append(lpath)
+ elif arg.startswith(('/', '-')):
+ other.append(arg)
+ # It's ok if we miss libraries with non-standard extensions here.
+ # They will go into the general link arguments.
+ elif arg.endswith('.lib') or arg.endswith('.a'):
+ # De-dup
+ if arg not in libs:
+ libs.append(arg)
+ else:
+ other.append(arg)
+ return lpaths, libs, other
+
+ def _get_cl_compiler(self, target):
+ for lang, c in target.compilers.items():
+ if lang in ('c', 'cpp'):
+ return c
+ # No C/C++ source files, only objects, but we still need a compiler, so
+ # return the first C or C++ compiler found for this machine
+ if len(target.objects) > 0:
+ for lang, c in self.environment.coredata.compilers[target.for_machine].items():
+ if lang in ('c', 'cpp'):
+ return c
+ raise MesonException('Could not find a C or C++ compiler. MSVC can only build C/C++ projects.')
+
+ def _prettyprint_vcxproj_xml(self, tree, ofname):
+ ofname_tmp = ofname + '~'
+ tree.write(ofname_tmp, encoding='utf-8', xml_declaration=True)
+
+ # ElementTree cannot do pretty-printing, so do it manually
+ doc = xml.dom.minidom.parse(ofname_tmp)
+ with open(ofname_tmp, 'w', encoding='utf-8') as of:
+ of.write(doc.toprettyxml())
+ replace_if_different(ofname, ofname_tmp)
+
+ def gen_vcxproj(self, target, ofname, guid):
+ mlog.debug('Generating vcxproj %s.' % target.name)
+ subsystem = 'Windows'
+ self.handled_target_deps[target.get_id()] = []
+ if isinstance(target, build.Executable):
+ conftype = 'Application'
+ if target.gui_app is not None:
+ if not target.gui_app:
+ subsystem = 'Console'
+ else:
+ # If someone knows how to set the version properly,
+ # please send a patch.
+ subsystem = target.win_subsystem.split(',')[0]
+ elif isinstance(target, build.StaticLibrary):
+ conftype = 'StaticLibrary'
+ elif isinstance(target, build.SharedLibrary):
+ conftype = 'DynamicLibrary'
+ elif isinstance(target, build.CustomTarget):
+ return self.gen_custom_target_vcxproj(target, ofname, guid)
+ elif isinstance(target, build.RunTarget):
+ return self.gen_run_target_vcxproj(target, ofname, guid)
+ else:
+ raise MesonException('Unknown target type for %s' % target.get_basename())
+ # Prefix to use to access the build root from the vcxproj dir
+ down = self.target_to_build_root(target)
+ # Prefix to use to access the source tree's root from the vcxproj dir
+ proj_to_src_root = os.path.join(down, self.build_to_src)
+ # Prefix to use to access the source tree's subdir from the vcxproj dir
+ proj_to_src_dir = os.path.join(proj_to_src_root, self.get_target_dir(target))
+ (sources, headers, objects, languages) = self.split_sources(target.sources)
+ if self.is_unity(target):
+ sources = self.generate_unity_files(target, sources)
+ compiler = self._get_cl_compiler(target)
+ build_args = compiler.get_buildtype_args(self.buildtype)
+ build_args += compiler.get_optimization_args(self.optimization)
+ build_args += compiler.get_debug_args(self.debug)
+ build_args += compiler.sanitizer_compile_args(self.sanitize)
+ buildtype_link_args = compiler.get_buildtype_linker_args(self.buildtype)
+ vscrt_type = self.environment.coredata.options[OptionKey('b_vscrt')]
+ project_name = target.name
+ target_name = target.name
+ root = ET.Element('Project', {'DefaultTargets': "Build",
+ 'ToolsVersion': '4.0',
+ 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
+ confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'})
+ prjconf = ET.SubElement(confitems, 'ProjectConfiguration',
+ {'Include': self.buildtype + '|' + self.platform})
+ p = ET.SubElement(prjconf, 'Configuration')
+ p.text = self.buildtype
+ pl = ET.SubElement(prjconf, 'Platform')
+ pl.text = self.platform
+ # Globals
+ globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
+ guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
+ guidelem.text = '{%s}' % guid
+ kw = ET.SubElement(globalgroup, 'Keyword')
+ kw.text = self.platform + 'Proj'
+ ns = ET.SubElement(globalgroup, 'RootNamespace')
+ ns.text = target_name
+ p = ET.SubElement(globalgroup, 'Platform')
+ p.text = self.platform
+ pname = ET.SubElement(globalgroup, 'ProjectName')
+ pname.text = project_name
+ if self.windows_target_platform_version:
+ ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ # Start configuration
+ type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
+ ET.SubElement(type_config, 'ConfigurationType').text = conftype
+ ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
+ if self.platform_toolset:
+ ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
+ # FIXME: Meson's LTO support needs to be integrated here
+ ET.SubElement(type_config, 'WholeProgramOptimization').text = 'false'
+ # Let VS auto-set the RTC level
+ ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'Default'
+ # Incremental linking increases code size
+ if '/INCREMENTAL:NO' in buildtype_link_args:
+ ET.SubElement(type_config, 'LinkIncremental').text = 'false'
+
+ # Build information
+ compiles = ET.SubElement(root, 'ItemDefinitionGroup')
+ clconf = ET.SubElement(compiles, 'ClCompile')
+ # CRT type; debug or release
+ if vscrt_type.value == 'from_buildtype':
+ if self.buildtype == 'debug':
+ ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
+ ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL'
+ else:
+ ET.SubElement(type_config, 'UseDebugLibraries').text = 'false'
+ ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL'
+ elif vscrt_type.value == 'static_from_buildtype':
+ if self.buildtype == 'debug':
+ ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
+ ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebug'
+ else:
+ ET.SubElement(type_config, 'UseDebugLibraries').text = 'false'
+ ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded'
+ elif vscrt_type.value == 'mdd':
+ ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
+ ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL'
+ elif vscrt_type.value == 'mt':
+ # FIXME, wrong
+ ET.SubElement(type_config, 'UseDebugLibraries').text = 'false'
+ ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded'
+ elif vscrt_type.value == 'mtd':
+ # FIXME, wrong
+ ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
+ ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebug'
+ else:
+ ET.SubElement(type_config, 'UseDebugLibraries').text = 'false'
+ ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL'
+ # Sanitizers
+ if '/fsanitize=address' in build_args:
+ ET.SubElement(type_config, 'EnableASAN').text = 'true'
+ # Debug format
+ if '/ZI' in build_args:
+ ET.SubElement(clconf, 'DebugInformationFormat').text = 'EditAndContinue'
+ elif '/Zi' in build_args:
+ ET.SubElement(clconf, 'DebugInformationFormat').text = 'ProgramDatabase'
+ elif '/Z7' in build_args:
+ ET.SubElement(clconf, 'DebugInformationFormat').text = 'OldStyle'
+ else:
+ ET.SubElement(clconf, 'DebugInformationFormat').text = 'None'
+ # Runtime checks
+ if '/RTC1' in build_args:
+ ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'EnableFastChecks'
+ elif '/RTCu' in build_args:
+ ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck'
+ elif '/RTCs' in build_args:
+ ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck'
+ # Exception handling has to be set in the XML in addition to the "AdditionalOptions" because otherwise
+ # cl will emit warning D9025 about overriding '/EHs' with the cpp_eh value
+ if 'cpp' in target.compilers:
+ eh = self.environment.coredata.options[OptionKey('eh', machine=target.for_machine, lang='cpp')]
+ if eh.value == 'a':
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'Async'
+ elif eh.value == 's':
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'SyncCThrow'
+ elif eh.value == 'none':
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'false'
+ else: # 'sc' or 'default'
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'Sync'
+ # End configuration
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
+ generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(target, root)
+ (gen_src, gen_hdrs, gen_objs, gen_langs) = self.split_sources(generated_files)
+ (custom_src, custom_hdrs, custom_objs, custom_langs) = self.split_sources(custom_target_output_files)
+ gen_src += custom_src
+ gen_hdrs += custom_hdrs
+ gen_langs += custom_langs
+ # Project information
+ direlem = ET.SubElement(root, 'PropertyGroup')
+ fver = ET.SubElement(direlem, '_ProjectFileVersion')
+ fver.text = self.project_file_version
+ outdir = ET.SubElement(direlem, 'OutDir')
+ outdir.text = '.\\'
+ intdir = ET.SubElement(direlem, 'IntDir')
+ intdir.text = target.get_id() + '\\'
+ tfilename = os.path.splitext(target.get_filename())
+ ET.SubElement(direlem, 'TargetName').text = tfilename[0]
+ ET.SubElement(direlem, 'TargetExt').text = tfilename[1]
+
+ # Arguments, include dirs, defines for all files in the current target
+ target_args = []
+ target_defines = []
+ target_inc_dirs = []
+ # Arguments, include dirs, defines passed to individual files in
+ # a target; perhaps because the args are language-specific
+ #
+ # file_args is also later split out into defines and include_dirs in
+ # case someone passed those in there
+ file_args = {l: c.compiler_args() for l, c in target.compilers.items()}
+ file_defines = {l: [] for l in target.compilers}
+ file_inc_dirs = {l: [] for l in target.compilers}
+ # The order in which these compile args are added must match
+ # generate_single_compile() and generate_basic_compiler_args()
+ for l, comp in target.compilers.items():
+ if l in file_args:
+ file_args[l] += compilers.get_base_compile_args(
+ self.get_base_options_for_target(target), comp)
+ file_args[l] += comp.get_option_compile_args(
+ self.environment.coredata.options)
+
+ # Add compile args added using add_project_arguments()
+ for l, args in self.build.projects_args[target.for_machine].get(target.subproject, {}).items():
+ if l in file_args:
+ file_args[l] += args
+ # Add compile args added using add_global_arguments()
+ # These override per-project arguments
+ for l, args in self.build.global_args[target.for_machine].items():
+ if l in file_args:
+ file_args[l] += args
+ # Compile args added from the env or cross file: CFLAGS/CXXFLAGS, etc. We want these
+ # to override all the defaults, but not the per-target compile args.
+ for l in file_args.keys():
+ opts = self.environment.coredata.options[OptionKey('args', machine=target.for_machine, lang=l)]
+ file_args[l] += opts.value
+ for args in file_args.values():
+ # This is where Visual Studio will insert target_args, target_defines,
+ # etc, which are added later from external deps (see below).
+ args += ['%(AdditionalOptions)', '%(PreprocessorDefinitions)', '%(AdditionalIncludeDirectories)']
+ # Add custom target dirs as includes automatically, but before
+ # target-specific include dirs. See _generate_single_compile() in
+ # the ninja backend for caveats.
+ args += ['-I' + arg for arg in generated_files_include_dirs]
+ # Add include dirs from the `include_directories:` kwarg on the target
+ # and from `include_directories:` of internal deps of the target.
+ #
+ # Target include dirs should override internal deps include dirs.
+ # This is handled in BuildTarget.process_kwargs()
+ #
+ # Include dirs from internal deps should override include dirs from
+ # external deps and must maintain the order in which they are
+ # specified. Hence, we must reverse so that the order is preserved.
+ #
+ # These are per-target, but we still add them as per-file because we
+ # need them to be looked in first.
+ for d in reversed(target.get_include_dirs()):
+ # reversed is used to keep order of includes
+ for i in reversed(d.get_incdirs()):
+ curdir = os.path.join(d.get_curdir(), i)
+ args.append('-I' + self.relpath(curdir, target.subdir)) # build dir
+ args.append('-I' + os.path.join(proj_to_src_root, curdir)) # src dir
+ for i in d.get_extra_build_dirs():
+ curdir = os.path.join(d.get_curdir(), i)
+ args.append('-I' + self.relpath(curdir, target.subdir)) # build dir
+ # Add per-target compile args, f.ex, `c_args : ['/DFOO']`. We set these
+ # near the end since these are supposed to override everything else.
+ for l, args in target.extra_args.items():
+ if l in file_args:
+ file_args[l] += args
+ # The highest priority includes. In order of directory search:
+ # target private dir, target build dir, target source dir
+ for args in file_args.values():
+ t_inc_dirs = [self.relpath(self.get_target_private_dir(target),
+ self.get_target_dir(target))]
+ if target.implicit_include_directories:
+ t_inc_dirs += ['.', proj_to_src_dir]
+ args += ['-I' + arg for arg in t_inc_dirs]
+
+ # Split preprocessor defines and include directories out of the list of
+ # all extra arguments. The rest go into %(AdditionalOptions).
+ for l, args in file_args.items():
+ for arg in args[:]:
+ if arg.startswith(('-D', '/D')) or arg == '%(PreprocessorDefinitions)':
+ file_args[l].remove(arg)
+ # Don't escape the marker
+ if arg == '%(PreprocessorDefinitions)':
+ define = arg
+ else:
+ define = arg[2:]
+ # De-dup
+ if define not in file_defines[l]:
+ file_defines[l].append(define)
+ elif arg.startswith(('-I', '/I')) or arg == '%(AdditionalIncludeDirectories)':
+ file_args[l].remove(arg)
+ # Don't escape the marker
+ if arg == '%(AdditionalIncludeDirectories)':
+ inc_dir = arg
+ else:
+ inc_dir = arg[2:]
+ # De-dup
+ if inc_dir not in file_inc_dirs[l]:
+ file_inc_dirs[l].append(inc_dir)
+ # Add include dirs to target as well so that "Go to Document" works in headers
+ if inc_dir not in target_inc_dirs:
+ target_inc_dirs.append(inc_dir)
+
+ # Split compile args needed to find external dependencies
+ # Link args are added while generating the link command
+ for d in reversed(target.get_external_deps()):
+ # Cflags required by external deps might have UNIX-specific flags,
+ # so filter them out if needed
+ if isinstance(d, dependencies.OpenMPDependency):
+ ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
+ else:
+ d_compile_args = compiler.unix_args_to_native(d.get_compile_args())
+ for arg in d_compile_args:
+ if arg.startswith(('-D', '/D')):
+ define = arg[2:]
+ # De-dup
+ if define in target_defines:
+ target_defines.remove(define)
+ target_defines.append(define)
+ elif arg.startswith(('-I', '/I')):
+ inc_dir = arg[2:]
+ # De-dup
+ if inc_dir not in target_inc_dirs:
+ target_inc_dirs.append(inc_dir)
+ else:
+ target_args.append(arg)
+
+ languages += gen_langs
+ if '/Gw' in build_args:
+ target_args.append('/Gw')
+ if len(target_args) > 0:
+ target_args.append('%(AdditionalOptions)')
+ ET.SubElement(clconf, "AdditionalOptions").text = ' '.join(target_args)
+ ET.SubElement(clconf, 'AdditionalIncludeDirectories').text = ';'.join(target_inc_dirs)
+ target_defines.append('%(PreprocessorDefinitions)')
+ ET.SubElement(clconf, 'PreprocessorDefinitions').text = ';'.join(target_defines)
+ ET.SubElement(clconf, 'FunctionLevelLinking').text = 'true'
+ # Warning level
+ warning_level = self.get_option_for_target(OptionKey('warning_level'), target)
+ ET.SubElement(clconf, 'WarningLevel').text = 'Level' + str(1 + int(warning_level))
+ if self.get_option_for_target(OptionKey('werror'), target):
+ ET.SubElement(clconf, 'TreatWarningAsError').text = 'true'
+ # Optimization flags
+ o_flags = split_o_flags_args(build_args)
+ if '/Ox' in o_flags:
+ ET.SubElement(clconf, 'Optimization').text = 'Full'
+ elif '/O2' in o_flags:
+ ET.SubElement(clconf, 'Optimization').text = 'MaxSpeed'
+ elif '/O1' in o_flags:
+ ET.SubElement(clconf, 'Optimization').text = 'MinSpace'
+ elif '/Od' in o_flags:
+ ET.SubElement(clconf, 'Optimization').text = 'Disabled'
+ if '/Oi' in o_flags:
+ ET.SubElement(clconf, 'IntrinsicFunctions').text = 'true'
+ if '/Ob1' in o_flags:
+ ET.SubElement(clconf, 'InlineFunctionExpansion').text = 'OnlyExplicitInline'
+ elif '/Ob2' in o_flags:
+ ET.SubElement(clconf, 'InlineFunctionExpansion').text = 'AnySuitable'
+ # Size-preserving flags
+ if '/Os' in o_flags:
+ ET.SubElement(clconf, 'FavorSizeOrSpeed').text = 'Size'
+ else:
+ ET.SubElement(clconf, 'FavorSizeOrSpeed').text = 'Speed'
+ # Note: SuppressStartupBanner is /NOLOGO and is 'true' by default
+ pch_sources = {}
+ if self.environment.coredata.options.get(OptionKey('b_pch')):
+ for lang in ['c', 'cpp']:
+ pch = target.get_pch(lang)
+ if not pch:
+ continue
+ if compiler.id == 'msvc':
+ if len(pch) == 1:
+ # Auto generate PCH.
+ src = os.path.join(down, self.create_msvc_pch_implementation(target, lang, pch[0]))
+ pch_header_dir = os.path.dirname(os.path.join(proj_to_src_dir, pch[0]))
+ else:
+ src = os.path.join(proj_to_src_dir, pch[1])
+ pch_header_dir = None
+ pch_sources[lang] = [pch[0], src, lang, pch_header_dir]
+ else:
+ # I don't know whether it's relevant, but let's handle other compilers
+ # used with a VS backend
+ pch_sources[lang] = [pch[0], None, lang, None]
+
+ resourcecompile = ET.SubElement(compiles, 'ResourceCompile')
+ ET.SubElement(resourcecompile, 'PreprocessorDefinitions')
+
+ # Linker options
+ link = ET.SubElement(compiles, 'Link')
+ extra_link_args = compiler.compiler_args()
+ # FIXME: Can these buildtype linker args be added as tags in the
+ # vcxproj file (similar to buildtype compiler args) instead of in
+ # AdditionalOptions?
+ extra_link_args += compiler.get_buildtype_linker_args(self.buildtype)
+ # Generate Debug info
+ if self.debug:
+ self.generate_debug_information(link)
+ else:
+ ET.SubElement(link, 'GenerateDebugInformation').text = 'false'
+ if not isinstance(target, build.StaticLibrary):
+ if isinstance(target, build.SharedModule):
+ options = self.environment.coredata.options
+ extra_link_args += compiler.get_std_shared_module_link_args(options)
+ # Add link args added using add_project_link_arguments()
+ extra_link_args += self.build.get_project_link_args(compiler, target.subproject, target.for_machine)
+ # Add link args added using add_global_link_arguments()
+ # These override per-project link arguments
+ extra_link_args += self.build.get_global_link_args(compiler, target.for_machine)
+ # Link args added from the env: LDFLAGS, or the cross file. We want
+ # these to override all the defaults but not the per-target link
+ # args.
+ extra_link_args += self.environment.coredata.get_external_link_args(target.for_machine, compiler.get_language())
+ # Only non-static built targets need link args and link dependencies
+ extra_link_args += target.link_args
+ # External deps must be last because target link libraries may depend on them.
+ for dep in target.get_external_deps():
+ # Extend without reordering or de-dup to preserve `-L -l` sets
+ # https://github.com/mesonbuild/meson/issues/1718
+ if isinstance(dep, dependencies.OpenMPDependency):
+ ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
+ else:
+ extra_link_args.extend_direct(dep.get_link_args())
+ for d in target.get_dependencies():
+ if isinstance(d, build.StaticLibrary):
+ for dep in d.get_external_deps():
+ if isinstance(dep, dependencies.OpenMPDependency):
+ ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
+ else:
+ extra_link_args.extend_direct(dep.get_link_args())
+ # Add link args for c_* or cpp_* build options. Currently this only
+ # adds c_winlibs and cpp_winlibs when building for Windows. This needs
+ # to be after all internal and external libraries so that unresolved
+ # symbols from those can be found here. This is needed when the
+ # *_winlibs that we want to link to are static mingw64 libraries.
+ extra_link_args += compiler.get_option_link_args(self.environment.coredata.options)
+ (additional_libpaths, additional_links, extra_link_args) = self.split_link_args(extra_link_args.to_native())
+
+ # Add more libraries to be linked if needed
+ for t in target.get_dependencies():
+ if isinstance(t, build.CustomTargetIndex):
+ # We don't need the actual project here, just the library name
+ lobj = t
+ else:
+ lobj = self.build.targets[t.get_id()]
+ linkname = os.path.join(down, self.get_target_filename_for_linking(lobj))
+ if t in target.link_whole_targets:
+ if compiler.id == 'msvc' and version_compare(compiler.version, '<19.00.23918'):
+ # Expand our object lists manually if we are on pre-Visual Studio 2015 Update 2
+ l = t.extract_all_objects(False)
+
+ # Unfortunately, we can't use self.object_filename_from_source()
+ gensrclist: T.List[File] = []
+ for gen in l.genlist:
+ for src in gen.get_outputs():
+ if self.environment.is_source(src) and not self.environment.is_header(src):
+ path = self.get_target_generated_dir(t, gen, src)
+ gen_src_ext = '.' + os.path.splitext(path)[1][1:]
+ extra_link_args.append(path[:-len(gen_src_ext)] + '.obj')
+
+ for src in l.srclist:
+ obj_basename = None
+ if self.environment.is_source(src) and not self.environment.is_header(src):
+ obj_basename = self.object_filename_from_source(t, src)
+ target_private_dir = self.relpath(self.get_target_private_dir(t),
+ self.get_target_dir(t))
+ rel_obj = os.path.join(target_private_dir, obj_basename)
+ extra_link_args.append(rel_obj)
+
+ extra_link_args.extend(self.flatten_object_list(t))
+ else:
+ # /WHOLEARCHIVE:foo must go into AdditionalOptions
+ extra_link_args += compiler.get_link_whole_for(linkname)
+ # To force Visual Studio to build this project even though it
+ # has no sources, we include a reference to the vcxproj file
+ # that builds this target. Technically we should add this only
+ # if the current target has no sources, but it doesn't hurt to
+ # have 'extra' references.
+ trelpath = self.get_target_dir_relative_to(t, target)
+ tvcxproj = os.path.join(trelpath, t.get_id() + '.vcxproj')
+ tid = self.environment.coredata.target_guids[t.get_id()]
+ self.add_project_reference(root, tvcxproj, tid, link_outputs=True)
+ # Mark the dependency as already handled to not have
+ # multiple references to the same target.
+ self.handled_target_deps[target.get_id()].append(t.get_id())
+ else:
+ # Other libraries go into AdditionalDependencies
+ if linkname not in additional_links:
+ additional_links.append(linkname)
+ for lib in self.get_custom_target_provided_libraries(target):
+ additional_links.append(self.relpath(lib, self.get_target_dir(target)))
+ additional_objects = []
+ for o in self.flatten_object_list(target, down):
+ assert isinstance(o, str)
+ additional_objects.append(o)
+ for o in custom_objs:
+ additional_objects.append(o)
+
+ if len(extra_link_args) > 0:
+ extra_link_args.append('%(AdditionalOptions)')
+ ET.SubElement(link, "AdditionalOptions").text = ' '.join(extra_link_args)
+ if len(additional_libpaths) > 0:
+ additional_libpaths.insert(0, '%(AdditionalLibraryDirectories)')
+ ET.SubElement(link, 'AdditionalLibraryDirectories').text = ';'.join(additional_libpaths)
+ if len(additional_links) > 0:
+ additional_links.append('%(AdditionalDependencies)')
+ ET.SubElement(link, 'AdditionalDependencies').text = ';'.join(additional_links)
+ ofile = ET.SubElement(link, 'OutputFile')
+ ofile.text = '$(OutDir)%s' % target.get_filename()
+ subsys = ET.SubElement(link, 'SubSystem')
+ subsys.text = subsystem
+ if (isinstance(target, build.SharedLibrary) or isinstance(target, build.Executable)) and target.get_import_filename():
+ # DLLs built with MSVC always have an import library except when
+ # they're data-only DLLs, but we don't support those yet.
+ ET.SubElement(link, 'ImportLibrary').text = target.get_import_filename()
+ if isinstance(target, build.SharedLibrary):
+ # Add module definitions file, if provided
+ if target.vs_module_defs:
+ relpath = os.path.join(down, target.vs_module_defs.rel_to_builddir(self.build_to_src))
+ ET.SubElement(link, 'ModuleDefinitionFile').text = relpath
+ if self.debug:
+ pdb = ET.SubElement(link, 'ProgramDataBaseFileName')
+ pdb.text = '$(OutDir)%s.pdb' % target_name
+ targetmachine = ET.SubElement(link, 'TargetMachine')
+ targetplatform = self.platform.lower()
+ if targetplatform == 'win32':
+ targetmachine.text = 'MachineX86'
+ elif targetplatform == 'x64':
+ targetmachine.text = 'MachineX64'
+ elif targetplatform == 'arm':
+ targetmachine.text = 'MachineARM'
+ elif targetplatform == 'arm64':
+ targetmachine.text = 'MachineARM64'
+ elif targetplatform == 'arm64ec':
+ targetmachine.text = 'MachineARM64EC'
+ else:
+ raise MesonException('Unsupported Visual Studio target machine: ' + targetplatform)
+ # /nologo
+ ET.SubElement(link, 'SuppressStartupBanner').text = 'true'
+ # /release
+ if not self.environment.coredata.get_option(OptionKey('debug')):
+ ET.SubElement(link, 'SetChecksum').text = 'true'
+
+ meson_file_group = ET.SubElement(root, 'ItemGroup')
+ ET.SubElement(meson_file_group, 'None', Include=os.path.join(proj_to_src_dir, build_filename))
+
+ # Visual Studio can't load projects that contain duplicated items. Filter them out
+ # by keeping track of already added paths.
+ def path_normalize_add(path, lis):
+ normalized = os.path.normcase(os.path.normpath(path))
+ if normalized not in lis:
+ lis.append(normalized)
+ return True
+ else:
+ return False
+
+ previous_includes = []
+ if len(headers) + len(gen_hdrs) + len(target.extra_files) + len(pch_sources) > 0:
+ inc_hdrs = ET.SubElement(root, 'ItemGroup')
+ for h in headers:
+ relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src))
+ if path_normalize_add(relpath, previous_includes):
+ ET.SubElement(inc_hdrs, 'CLInclude', Include=relpath)
+ for h in gen_hdrs:
+ if path_normalize_add(h, previous_includes):
+ ET.SubElement(inc_hdrs, 'CLInclude', Include=h)
+ for h in target.extra_files:
+ relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src))
+ if path_normalize_add(relpath, previous_includes):
+ ET.SubElement(inc_hdrs, 'CLInclude', Include=relpath)
+ for lang in pch_sources:
+ h = pch_sources[lang][0]
+ path = os.path.join(proj_to_src_dir, h)
+ if path_normalize_add(path, previous_includes):
+ ET.SubElement(inc_hdrs, 'CLInclude', Include=path)
+
+ previous_sources = []
+ if len(sources) + len(gen_src) + len(pch_sources) > 0:
+ inc_src = ET.SubElement(root, 'ItemGroup')
+ for s in sources:
+ relpath = os.path.join(down, s.rel_to_builddir(self.build_to_src))
+ if path_normalize_add(relpath, previous_sources):
+ inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath)
+ lang = Vs2010Backend.lang_from_source_file(s)
+ self.add_pch(pch_sources, lang, inc_cl)
+ self.add_additional_options(lang, inc_cl, file_args)
+ self.add_preprocessor_defines(lang, inc_cl, file_defines)
+ self.add_include_dirs(lang, inc_cl, file_inc_dirs)
+ ET.SubElement(inc_cl, 'ObjectFileName').text = "$(IntDir)" + self.object_filename_from_source(target, s)
+ for s in gen_src:
+ if path_normalize_add(s, previous_sources):
+ inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s)
+ lang = Vs2010Backend.lang_from_source_file(s)
+ self.add_pch(pch_sources, lang, inc_cl)
+ self.add_additional_options(lang, inc_cl, file_args)
+ self.add_preprocessor_defines(lang, inc_cl, file_defines)
+ self.add_include_dirs(lang, inc_cl, file_inc_dirs)
+ for lang in pch_sources:
+ impl = pch_sources[lang][1]
+ if impl and path_normalize_add(impl, previous_sources):
+ inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=impl)
+ self.create_pch(pch_sources, lang, inc_cl)
+ self.add_additional_options(lang, inc_cl, file_args)
+ self.add_preprocessor_defines(lang, inc_cl, file_defines)
+ pch_header_dir = pch_sources[lang][3]
+ if pch_header_dir:
+ inc_dirs = copy.deepcopy(file_inc_dirs)
+ inc_dirs[lang] = [pch_header_dir] + inc_dirs[lang]
+ else:
+ inc_dirs = file_inc_dirs
+ self.add_include_dirs(lang, inc_cl, inc_dirs)
+
+ previous_objects = []
+ if self.has_objects(objects, additional_objects, gen_objs):
+ inc_objs = ET.SubElement(root, 'ItemGroup')
+ for s in objects:
+ relpath = os.path.join(down, s.rel_to_builddir(self.build_to_src))
+ if path_normalize_add(relpath, previous_objects):
+ ET.SubElement(inc_objs, 'Object', Include=relpath)
+ for s in additional_objects:
+ if path_normalize_add(s, previous_objects):
+ ET.SubElement(inc_objs, 'Object', Include=s)
+ self.add_generated_objects(inc_objs, gen_objs)
+
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ self.add_regen_dependency(root)
+ self.add_target_deps(root, target)
+ self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+
+ def gen_regenproj(self, project_name, ofname):
+ root = ET.Element('Project', {'DefaultTargets': 'Build',
+ 'ToolsVersion': '4.0',
+ 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
+ confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'})
+ prjconf = ET.SubElement(confitems, 'ProjectConfiguration',
+ {'Include': self.buildtype + '|' + self.platform})
+ p = ET.SubElement(prjconf, 'Configuration')
+ p.text = self.buildtype
+ pl = ET.SubElement(prjconf, 'Platform')
+ pl.text = self.platform
+ globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
+ guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
+ guidelem.text = '{%s}' % self.environment.coredata.regen_guid
+ kw = ET.SubElement(globalgroup, 'Keyword')
+ kw.text = self.platform + 'Proj'
+ p = ET.SubElement(globalgroup, 'Platform')
+ p.text = self.platform
+ pname = ET.SubElement(globalgroup, 'ProjectName')
+ pname.text = project_name
+ if self.windows_target_platform_version:
+ ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
+ ET.SubElement(type_config, 'ConfigurationType').text = "Utility"
+ ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
+ ET.SubElement(type_config, 'UseOfMfc').text = 'false'
+ if self.platform_toolset:
+ ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
+ direlem = ET.SubElement(root, 'PropertyGroup')
+ fver = ET.SubElement(direlem, '_ProjectFileVersion')
+ fver.text = self.project_file_version
+ outdir = ET.SubElement(direlem, 'OutDir')
+ outdir.text = '.\\'
+ intdir = ET.SubElement(direlem, 'IntDir')
+ intdir.text = 'regen-temp\\'
+ tname = ET.SubElement(direlem, 'TargetName')
+ tname.text = project_name
+
+ action = ET.SubElement(root, 'ItemDefinitionGroup')
+ midl = ET.SubElement(action, 'Midl')
+ ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)'
+ ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)'
+ ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h'
+ ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb'
+ ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
+ ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
+ regen_command = self.environment.get_build_command() + ['--internal', 'regencheck']
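+ # Initialize the VS environment first, then run the serialized regen
+ # check; the resulting command looks like (illustrative):
+ # call "...\vcvarsall.bat" x64 > NUL
+ # "...\meson" "--internal" "regencheck" "<scratch dir>"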
+ cmd_templ = '''call %s > NUL
+"%s" "%s"'''
+ regen_command = cmd_templ % \
+ (self.get_vcvars_command(), '" "'.join(regen_command), self.environment.get_scratch_dir())
+ self.add_custom_build(root, 'regen', regen_command, deps=self.get_regen_filelist(),
+ outputs=[Vs2010Backend.get_regen_stampfile(self.environment.get_build_dir())],
+ msg='Checking whether solution needs to be regenerated.')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ ET.SubElement(root, 'ImportGroup', Label='ExtensionTargets')
+ self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+
+ def gen_testproj(self, target_name, ofname):
+ project_name = target_name
+ root = ET.Element('Project', {'DefaultTargets': "Build",
+ 'ToolsVersion': '4.0',
+ 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
+ confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'})
+ prjconf = ET.SubElement(confitems, 'ProjectConfiguration',
+ {'Include': self.buildtype + '|' + self.platform})
+ p = ET.SubElement(prjconf, 'Configuration')
+ p.text = self.buildtype
+ pl = ET.SubElement(prjconf, 'Platform')
+ pl.text = self.platform
+ globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
+ guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
+ guidelem.text = '{%s}' % self.environment.coredata.test_guid
+ kw = ET.SubElement(globalgroup, 'Keyword')
+ kw.text = self.platform + 'Proj'
+ p = ET.SubElement(globalgroup, 'Platform')
+ p.text = self.platform
+ pname = ET.SubElement(globalgroup, 'ProjectName')
+ pname.text = project_name
+ if self.windows_target_platform_version:
+ ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
+ ET.SubElement(type_config, 'ConfigurationType')
+ ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
+ ET.SubElement(type_config, 'UseOfMfc').text = 'false'
+ if self.platform_toolset:
+ ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
+ direlem = ET.SubElement(root, 'PropertyGroup')
+ fver = ET.SubElement(direlem, '_ProjectFileVersion')
+ fver.text = self.project_file_version
+ outdir = ET.SubElement(direlem, 'OutDir')
+ outdir.text = '.\\'
+ intdir = ET.SubElement(direlem, 'IntDir')
+ intdir.text = 'test-temp\\'
+ tname = ET.SubElement(direlem, 'TargetName')
+ tname.text = target_name
+
+ action = ET.SubElement(root, 'ItemDefinitionGroup')
+ midl = ET.SubElement(action, 'Midl')
+ ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)'
+ ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)'
+ ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h'
+ ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb'
+ ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
+ ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
+ # FIXME: No benchmarks?
+ test_command = self.environment.get_build_command() + ['test', '--no-rebuild']
+ if not self.environment.coredata.get_option(OptionKey('stdsplit')):
+ test_command += ['--no-stdsplit']
+ if self.environment.coredata.get_option(OptionKey('errorlogs')):
+ test_command += ['--print-errorlogs']
+ self.serialize_tests()
+ self.add_custom_build(root, 'run_tests', '"%s"' % ('" "'.join(test_command)))
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ self.add_regen_dependency(root)
+ self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+
+ def gen_installproj(self, target_name, ofname):
+ self.create_install_data_files()
+ project_name = target_name
+ root = ET.Element('Project', {'DefaultTargets': "Build",
+ 'ToolsVersion': '4.0',
+ 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
+ confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'})
+ prjconf = ET.SubElement(confitems, 'ProjectConfiguration',
+ {'Include': self.buildtype + '|' + self.platform})
+ p = ET.SubElement(prjconf, 'Configuration')
+ p.text = self.buildtype
+ pl = ET.SubElement(prjconf, 'Platform')
+ pl.text = self.platform
+ globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
+ guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
+ guidelem.text = '{%s}' % self.environment.coredata.install_guid
+ kw = ET.SubElement(globalgroup, 'Keyword')
+ kw.text = self.platform + 'Proj'
+ p = ET.SubElement(globalgroup, 'Platform')
+ p.text = self.platform
+ pname = ET.SubElement(globalgroup, 'ProjectName')
+ pname.text = project_name
+ if self.windows_target_platform_version:
+ ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
+ ET.SubElement(type_config, 'ConfigurationType')
+ ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
+ ET.SubElement(type_config, 'UseOfMfc').text = 'false'
+ if self.platform_toolset:
+ ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
+ direlem = ET.SubElement(root, 'PropertyGroup')
+ fver = ET.SubElement(direlem, '_ProjectFileVersion')
+ fver.text = self.project_file_version
+ outdir = ET.SubElement(direlem, 'OutDir')
+ outdir.text = '.\\'
+ intdir = ET.SubElement(direlem, 'IntDir')
+ intdir.text = 'install-temp\\'
+ tname = ET.SubElement(direlem, 'TargetName')
+ tname.text = target_name
+
+ action = ET.SubElement(root, 'ItemDefinitionGroup')
+ midl = ET.SubElement(action, 'Midl')
+ ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)'
+ ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)'
+ ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h'
+ ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb'
+ ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
+ ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
+ install_command = self.environment.get_build_command() + ['install', '--no-rebuild']
+ self.add_custom_build(root, 'run_install', '"%s"' % ('" "'.join(install_command)))
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ self.add_regen_dependency(root)
+ self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+
+ def add_custom_build(self, node, rulename, command, deps=None, outputs=None, msg=None, verify_files=True):
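+        # Attach a CustomBuild item to 'node' that runs 'command'. 'deps' become
+        # AdditionalInputs and 'outputs' the Outputs list; when no outputs are
+        # given, a nonexistent file is used so the step reruns on every build.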
+ igroup = ET.SubElement(node, 'ItemGroup')
+ rulefile = os.path.join(self.environment.get_scratch_dir(), rulename + '.rule')
+ if not os.path.exists(rulefile):
+ with open(rulefile, 'w', encoding='utf-8') as f:
+ f.write("# Meson regen file.")
+ custombuild = ET.SubElement(igroup, 'CustomBuild', Include=rulefile)
+ if msg:
+ message = ET.SubElement(custombuild, 'Message')
+ message.text = msg
+ if not verify_files:
+ ET.SubElement(custombuild, 'VerifyInputsAndOutputsExist').text = 'false'
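+        # Wrap the command in a batch snippet that forwards its exit status out
+        # of the setlocal scope, following the :VCEnd pattern Visual Studio
+        # expects from custom build commands.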
+ cmd_templ = '''setlocal
+%s
+if %%errorlevel%% neq 0 goto :cmEnd
+:cmEnd
+endlocal & call :cmErrorLevel %%errorlevel%% & goto :cmDone
+:cmErrorLevel
+exit /b %%1
+:cmDone
+if %%errorlevel%% neq 0 goto :VCEnd'''
+ ET.SubElement(custombuild, 'Command').text = cmd_templ % command
+ if not outputs:
+ # Use a nonexistent file to always consider the target out-of-date.
+ outputs = [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(),
+ 'outofdate.file'))]
+ ET.SubElement(custombuild, 'Outputs').text = ';'.join(outputs)
+ if deps:
+ ET.SubElement(custombuild, 'AdditionalInputs').text = ';'.join(deps)
+
+ @staticmethod
+ def nonexistent_file(prefix):
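+        # Return a path that does not exist yet; used as a phony output so
+        # MSBuild always considers the custom build step out of date.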
+        i = 0
+        file = prefix
+        while os.path.exists(file):
+            file = '%s%d' % (prefix, i)
+            i += 1
+        return file
+
+ def generate_debug_information(self, link):
+        # valid values for vs2015 are 'false', 'true', 'DebugFastLink'
+ ET.SubElement(link, 'GenerateDebugInformation').text = 'true'
+
+ def add_regen_dependency(self, root):
+ regen_vcxproj = os.path.join(self.environment.get_build_dir(), 'REGEN.vcxproj')
+ self.add_project_reference(root, regen_vcxproj, self.environment.coredata.regen_guid)
diff --git a/meson/mesonbuild/backend/vs2012backend.py b/meson/mesonbuild/backend/vs2012backend.py
new file mode 100644
index 000000000..a9ba5f476
--- /dev/null
+++ b/meson/mesonbuild/backend/vs2012backend.py
@@ -0,0 +1,38 @@
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .vs2010backend import Vs2010Backend
+from ..mesonlib import MesonException
+from ..interpreter import Interpreter
+from ..build import Build
+import typing as T
+
+
+class Vs2012Backend(Vs2010Backend):
+ def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]):
+ super().__init__(build, interpreter)
+ self.name = 'vs2012'
+ self.vs_version = '2012'
+ if self.environment is not None:
+ # TODO: we assume host == build
+ comps = self.environment.coredata.compilers.host
+ if comps and all(c.id == 'intel-cl' for c in comps.values()):
+ c = list(comps.values())[0]
+ if c.version.startswith('19'):
+ self.platform_toolset = 'Intel C++ Compiler 19.0'
+ else:
+ # We don't have support for versions older than 2019 right now.
+ raise MesonException('There is currently no support for ICL before 19, patches welcome.')
+ if self.platform_toolset is None:
+ self.platform_toolset = 'v110'
diff --git a/meson/mesonbuild/backend/vs2013backend.py b/meson/mesonbuild/backend/vs2013backend.py
new file mode 100644
index 000000000..0f2c8bdc6
--- /dev/null
+++ b/meson/mesonbuild/backend/vs2013backend.py
@@ -0,0 +1,38 @@
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .vs2010backend import Vs2010Backend
+from ..mesonlib import MesonException
+from ..interpreter import Interpreter
+from ..build import Build
+import typing as T
+
+
+class Vs2013Backend(Vs2010Backend):
+ def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]):
+ super().__init__(build, interpreter)
+ self.name = 'vs2013'
+ self.vs_version = '2013'
+ if self.environment is not None:
+ # TODO: we assume host == build
+ comps = self.environment.coredata.compilers.host
+ if comps and all(c.id == 'intel-cl' for c in comps.values()):
+ c = list(comps.values())[0]
+ if c.version.startswith('19'):
+ self.platform_toolset = 'Intel C++ Compiler 19.0'
+ else:
+ # We don't have support for versions older than 2019 right now.
+ raise MesonException('There is currently no support for ICL before 19, patches welcome.')
+ if self.platform_toolset is None:
+ self.platform_toolset = 'v120'
diff --git a/meson/mesonbuild/backend/vs2015backend.py b/meson/mesonbuild/backend/vs2015backend.py
new file mode 100644
index 000000000..bdc1675a2
--- /dev/null
+++ b/meson/mesonbuild/backend/vs2015backend.py
@@ -0,0 +1,38 @@
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .vs2010backend import Vs2010Backend
+from ..mesonlib import MesonException
+from ..interpreter import Interpreter
+from ..build import Build
+import typing as T
+
+
+class Vs2015Backend(Vs2010Backend):
+ def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]):
+ super().__init__(build, interpreter)
+ self.name = 'vs2015'
+ self.vs_version = '2015'
+ if self.environment is not None:
+ # TODO: we assume host == build
+ comps = self.environment.coredata.compilers.host
+ if comps and all(c.id == 'intel-cl' for c in comps.values()):
+ c = list(comps.values())[0]
+ if c.version.startswith('19'):
+ self.platform_toolset = 'Intel C++ Compiler 19.0'
+ else:
+ # We don't have support for versions older than 2019 right now.
+ raise MesonException('There is currently no support for ICL before 19, patches welcome.')
+ if self.platform_toolset is None:
+ self.platform_toolset = 'v140'
diff --git a/meson/mesonbuild/backend/vs2017backend.py b/meson/mesonbuild/backend/vs2017backend.py
new file mode 100644
index 000000000..fa216065c
--- /dev/null
+++ b/meson/mesonbuild/backend/vs2017backend.py
@@ -0,0 +1,52 @@
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import typing as T
+import xml.etree.ElementTree as ET
+
+from .vs2010backend import Vs2010Backend
+from ..mesonlib import MesonException
+from ..interpreter import Interpreter
+from ..build import Build
+
+
+class Vs2017Backend(Vs2010Backend):
+ def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]):
+ super().__init__(build, interpreter)
+ self.name = 'vs2017'
+ self.vs_version = '2017'
+ # We assume that host == build
+ if self.environment is not None:
+ comps = self.environment.coredata.compilers.host
+ if comps:
+                if all(c.id == 'clang-cl' for c in comps.values()):
+                    self.platform_toolset = 'llvm'
+                elif all(c.id == 'intel-cl' for c in comps.values()):
+ c = list(comps.values())[0]
+ if c.version.startswith('19'):
+ self.platform_toolset = 'Intel C++ Compiler 19.0'
+ else:
+ # We don't have support for versions older than 2019 right now.
+ raise MesonException('There is currently no support for ICL before 19, patches welcome.')
+ if self.platform_toolset is None:
+ self.platform_toolset = 'v141'
+ # WindowsSDKVersion should be set by command prompt.
+ sdk_version = os.environ.get('WindowsSDKVersion', None)
+ if sdk_version:
+ self.windows_target_platform_version = sdk_version.rstrip('\\')
+
+ def generate_debug_information(self, link):
+        # valid values for vs2017 are 'false', 'true', 'DebugFastLink', 'DebugFull'
+ ET.SubElement(link, 'GenerateDebugInformation').text = 'DebugFull'
diff --git a/meson/mesonbuild/backend/vs2019backend.py b/meson/mesonbuild/backend/vs2019backend.py
new file mode 100644
index 000000000..8f304e48e
--- /dev/null
+++ b/meson/mesonbuild/backend/vs2019backend.py
@@ -0,0 +1,47 @@
+# Copyright 2014-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import typing as T
+import xml.etree.ElementTree as ET
+
+from .vs2010backend import Vs2010Backend
+from ..interpreter import Interpreter
+from ..build import Build
+
+
+class Vs2019Backend(Vs2010Backend):
+ def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]):
+ super().__init__(build, interpreter)
+ self.name = 'vs2019'
+ if self.environment is not None:
+ comps = self.environment.coredata.compilers.host
+ if comps and all(c.id == 'clang-cl' for c in comps.values()):
+ self.platform_toolset = 'ClangCL'
+ elif comps and all(c.id == 'intel-cl' for c in comps.values()):
+ c = list(comps.values())[0]
+ if c.version.startswith('19'):
+ self.platform_toolset = 'Intel C++ Compiler 19.0'
+ # We don't have support for versions older than 2019 right now.
+ if not self.platform_toolset:
+ self.platform_toolset = 'v142'
+ self.vs_version = '2019'
+ # WindowsSDKVersion should be set by command prompt.
+ sdk_version = os.environ.get('WindowsSDKVersion', None)
+ if sdk_version:
+ self.windows_target_platform_version = sdk_version.rstrip('\\')
+
+ def generate_debug_information(self, link):
+        # valid values for vs2019 are 'false', 'true', 'DebugFastLink', 'DebugFull'
+ ET.SubElement(link, 'GenerateDebugInformation').text = 'DebugFull'
diff --git a/meson/mesonbuild/backend/xcodebackend.py b/meson/mesonbuild/backend/xcodebackend.py
new file mode 100644
index 000000000..ff48ecf1c
--- /dev/null
+++ b/meson/mesonbuild/backend/xcodebackend.py
@@ -0,0 +1,1708 @@
+# Copyright 2014-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import backends
+from .. import build
+from .. import dependencies
+from .. import mesonlib
+from .. import mlog
+import uuid, os, operator
+import typing as T
+
+from ..mesonlib import MesonException, OptionKey
+from ..interpreter import Interpreter
+
+INDENT = '\t'
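+# Map source file suffixes to Xcode's explicitFileType identifiers.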
+XCODETYPEMAP = {'c': 'sourcecode.c.c',
+ 'a': 'archive.ar',
+ 'cc': 'sourcecode.cpp.cpp',
+ 'cxx': 'sourcecode.cpp.cpp',
+ 'cpp': 'sourcecode.cpp.cpp',
+ 'c++': 'sourcecode.cpp.cpp',
+ 'm': 'sourcecode.c.objc',
+ 'mm': 'sourcecode.cpp.objcpp',
+ 'h': 'sourcecode.c.h',
+ 'hpp': 'sourcecode.cpp.h',
+ 'hxx': 'sourcecode.cpp.h',
+ 'hh': 'sourcecode.cpp.hh',
+ 'inc': 'sourcecode.c.h',
+ 'swift': 'sourcecode.swift',
+ 'dylib': 'compiled.mach-o.dylib',
+ 'o': 'compiled.mach-o.objfile',
+ 's': 'sourcecode.asm',
+ 'asm': 'sourcecode.asm',
+ }
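+# Map Meson language names to the spellings Xcode uses inside build-setting
+# names (for example OTHER_CPLUSPLUSFLAGS); presumably consumed when the
+# per-language flags are written out later in this file.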
+LANGNAMEMAP = {'c': 'C',
+ 'cpp': 'CPLUSPLUS',
+ 'objc': 'OBJC',
+ 'objcpp': 'OBJCPLUSPLUS',
+ 'swift': 'SWIFT_'
+ }
+OPT2XCODEOPT = {'0': '0',
+ 'g': '0',
+ '1': '1',
+ '2': '2',
+ '3': '3',
+ 's': 's',
+ }
+BOOL2XCODEBOOL = {True: 'YES', False: 'NO'}
+LINKABLE_EXTENSIONS = {'.o', '.a', '.obj', '.so', '.dylib'}
+
+class FileTreeEntry:
+
+ def __init__(self):
+ self.subdirs = {}
+ self.targets = []
+
+class PbxItem:
+ def __init__(self, value, comment = ''):
+ self.value = value
+ self.comment = comment
+
+class PbxArray:
+ def __init__(self):
+ self.items = []
+
+ def add_item(self, item, comment=''):
+ if isinstance(item, PbxArrayItem):
+ self.items.append(item)
+ else:
+ self.items.append(PbxArrayItem(item, comment))
+
+ def write(self, ofile, indent_level):
+ ofile.write('(\n')
+ indent_level += 1
+ for i in self.items:
+ if i.comment:
+ ofile.write(indent_level*INDENT + f'{i.value} {i.comment},\n')
+ else:
+ ofile.write(indent_level*INDENT + f'{i.value},\n')
+ indent_level -= 1
+ ofile.write(indent_level*INDENT + ');\n')
+
+class PbxArrayItem:
+ def __init__(self, value, comment = ''):
+ self.value = value
+ if comment:
+ if '/*' in comment:
+ self.comment = comment
+ else:
+ self.comment = f'/* {comment} */'
+ else:
+ self.comment = comment
+
+class PbxComment:
+ def __init__(self, text):
+ assert(isinstance(text, str))
+ assert('/*' not in text)
+ self.text = f'/* {text} */'
+
+ def write(self, ofile, indent_level):
+ ofile.write(f'\n{self.text}\n')
+
+class PbxDictItem:
+ def __init__(self, key, value, comment = ''):
+ self.key = key
+ self.value = value
+ if comment:
+ if '/*' in comment:
+ self.comment = comment
+ else:
+ self.comment = f'/* {comment} */'
+ else:
+ self.comment = comment
+
+class PbxDict:
+ def __init__(self):
+ # This class is a bit weird, because we want to write PBX dicts in
+ # defined order _and_ we want to write intermediate comments also in order.
+ self.keys = set()
+ self.items = []
+
+ def add_item(self, key, value, comment=''):
+ item = PbxDictItem(key, value, comment)
+ assert(key not in self.keys)
+ self.keys.add(key)
+ self.items.append(item)
+
+ def add_comment(self, comment):
+ if isinstance(comment, str):
+            self.items.append(PbxComment(comment))
+ else:
+ assert(isinstance(comment, PbxComment))
+ self.items.append(comment)
+
+ def write(self, ofile, indent_level):
+ ofile.write('{\n')
+ indent_level += 1
+ for i in self.items:
+ if isinstance(i, PbxComment):
+ i.write(ofile, indent_level)
+ elif isinstance(i, PbxDictItem):
+ if isinstance(i.value, (str, int)):
+ if i.comment:
+ ofile.write(indent_level*INDENT + f'{i.key} = {i.value} {i.comment};\n')
+ else:
+ ofile.write(indent_level*INDENT + f'{i.key} = {i.value};\n')
+ elif isinstance(i.value, PbxDict):
+ if i.comment:
+ ofile.write(indent_level*INDENT + f'{i.key} {i.comment} = ')
+ else:
+ ofile.write(indent_level*INDENT + f'{i.key} = ')
+ i.value.write(ofile, indent_level)
+ elif isinstance(i.value, PbxArray):
+ if i.comment:
+ ofile.write(indent_level*INDENT + f'{i.key} {i.comment} = ')
+ else:
+ ofile.write(indent_level*INDENT + f'{i.key} = ')
+ i.value.write(ofile, indent_level)
+ else:
+ print(i)
+ print(i.key)
+ print(i.value)
+ raise RuntimeError('missing code')
+ else:
+ print(i)
+ raise RuntimeError('missing code2')
+
+ indent_level -= 1
+ ofile.write(indent_level*INDENT + '}')
+ if indent_level == 0:
+ ofile.write('\n')
+ else:
+ ofile.write(';\n')
+
+class XCodeBackend(backends.Backend):
+ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]):
+ super().__init__(build, interpreter)
+ self.name = 'xcode'
+ self.project_uid = self.environment.coredata.lang_guids['default'].replace('-', '')[:24]
+ self.buildtype = self.environment.coredata.get_option(OptionKey('buildtype'))
+ self.project_conflist = self.gen_id()
+ self.maingroup_id = self.gen_id()
+ self.all_id = self.gen_id()
+ self.all_buildconf_id = self.gen_id()
+ self.buildtypes = [self.buildtype]
+ self.test_id = self.gen_id()
+ self.test_command_id = self.gen_id()
+ self.test_buildconf_id = self.gen_id()
+ self.regen_id = self.gen_id()
+ self.regen_command_id = self.gen_id()
+ self.regen_buildconf_id = self.gen_id()
+ self.regen_dependency_id = self.gen_id()
+ self.top_level_dict = PbxDict()
+ self.generator_outputs = {}
+        # In Xcode, files are not accessed via their file names, but rather every one of them
+        # gets a unique id. More precisely, they get one unique id per target they are used
+        # in. If you generate only one id per file and reuse it, compilation will work but the
+        # UI will show the file in only one target, not the others. Thus the key is
+        # a tuple containing the target and filename.
+ self.buildfile_ids = {}
+ # That is not enough, though. Each target/file combination also gets a unique id
+ # in the file reference section. Because why not. This means that a source file
+ # that is used in two targets gets a total of four unique ID numbers.
+ self.fileref_ids = {}
+
+ def write_pbxfile(self, top_level_dict, ofilename):
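+        # Write to a temporary file and atomically replace the old project so
+        # a failed generation never leaves a truncated pbxproj behind.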
+ tmpname = ofilename + '.tmp'
+ with open(tmpname, 'w', encoding='utf-8') as ofile:
+ ofile.write('// !$*UTF8*$!\n')
+ top_level_dict.write(ofile, 0)
+ os.replace(tmpname, ofilename)
+
+ def gen_id(self):
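+        # Xcode object ids are 24 uppercase hex characters; derive them from
+        # random UUIDs.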
+ return str(uuid.uuid4()).upper().replace('-', '')[:24]
+
+ def get_target_dir(self, target):
+ dirname = os.path.join(target.get_subdir(), self.environment.coredata.get_option(OptionKey('buildtype')))
+ #os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True)
+ return dirname
+
+ def get_custom_target_output_dir(self, target):
+ dirname = target.get_subdir()
+ os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True)
+ return dirname
+
+ def target_to_build_root(self, target):
+ if self.get_target_dir(target) == '':
+ return ''
+ directories = os.path.normpath(self.get_target_dir(target)).split(os.sep)
+ return os.sep.join(['..'] * len(directories))
+
+ def object_filename_from_source(self, target, source):
+ # Xcode has the following naming scheme:
+ # projectname.build/debug/prog@exe.build/Objects-normal/x86_64/func.o
+ project = self.build.project_name
+ buildtype = self.buildtype
+ tname = target.get_id()
+ arch = 'x86_64'
+ if isinstance(source, mesonlib.File):
+ source = source.fname
+ stem = os.path.splitext(os.path.basename(source))[0]
+ return f'{project}.build/{buildtype}/{tname}.build/Objects-normal/{arch}/{stem}.o'
+
+ def generate(self):
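+        # Generation happens in two passes: first allocate ids for every
+        # object that will appear in the file, then write out the sections in
+        # the order Xcode itself uses.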
+ self.serialize_tests()
+ # Cache the result as the method rebuilds the array every time it is called.
+ self.build_targets = self.build.get_build_targets()
+ self.custom_targets = self.build.get_custom_targets()
+ self.generate_filemap()
+ self.generate_buildstylemap()
+ self.generate_build_phase_map()
+ self.generate_build_configuration_map()
+ self.generate_build_configurationlist_map()
+ self.generate_project_configurations_map()
+ self.generate_buildall_configurations_map()
+ self.generate_test_configurations_map()
+ self.generate_native_target_map()
+ self.generate_native_frameworks_map()
+ self.generate_custom_target_map()
+ self.generate_generator_target_map()
+ self.generate_source_phase_map()
+ self.generate_target_dependency_map()
+ self.generate_pbxdep_map()
+ self.generate_containerproxy_map()
+ self.generate_target_file_maps()
+ self.generate_build_file_maps()
+ self.proj_dir = os.path.join(self.environment.get_build_dir(), self.build.project_name + '.xcodeproj')
+ os.makedirs(self.proj_dir, exist_ok=True)
+ self.proj_file = os.path.join(self.proj_dir, 'project.pbxproj')
+ objects_dict = self.generate_prefix(self.top_level_dict)
+ objects_dict.add_comment(PbxComment('Begin PBXAggregateTarget section'))
+ self.generate_pbx_aggregate_target(objects_dict)
+ objects_dict.add_comment(PbxComment('End PBXAggregateTarget section'))
+ objects_dict.add_comment(PbxComment('Begin PBXBuildFile section'))
+ self.generate_pbx_build_file(objects_dict)
+ objects_dict.add_comment(PbxComment('End PBXBuildFile section'))
+ objects_dict.add_comment(PbxComment('Begin PBXBuildStyle section'))
+ self.generate_pbx_build_style(objects_dict)
+ objects_dict.add_comment(PbxComment('End PBXBuildStyle section'))
+ objects_dict.add_comment(PbxComment('Begin PBXContainerItemProxy section'))
+ self.generate_pbx_container_item_proxy(objects_dict)
+ objects_dict.add_comment(PbxComment('End PBXContainerItemProxy section'))
+ objects_dict.add_comment(PbxComment('Begin PBXFileReference section'))
+ self.generate_pbx_file_reference(objects_dict)
+ objects_dict.add_comment(PbxComment('End PBXFileReference section'))
+ objects_dict.add_comment(PbxComment('Begin PBXFrameworksBuildPhase section'))
+ self.generate_pbx_frameworks_buildphase(objects_dict)
+ objects_dict.add_comment(PbxComment('End PBXFrameworksBuildPhase section'))
+ objects_dict.add_comment(PbxComment('Begin PBXGroup section'))
+ self.generate_pbx_group(objects_dict)
+ objects_dict.add_comment(PbxComment('End PBXGroup section'))
+ objects_dict.add_comment(PbxComment('Begin PBXNativeTarget section'))
+ self.generate_pbx_native_target(objects_dict)
+ objects_dict.add_comment(PbxComment('End PBXNativeTarget section'))
+ objects_dict.add_comment(PbxComment('Begin PBXProject section'))
+ self.generate_pbx_project(objects_dict)
+ objects_dict.add_comment(PbxComment('End PBXProject section'))
+ objects_dict.add_comment(PbxComment('Begin PBXShellScriptBuildPhase section'))
+ self.generate_pbx_shell_build_phase(objects_dict)
+ objects_dict.add_comment(PbxComment('End PBXShellScriptBuildPhase section'))
+ objects_dict.add_comment(PbxComment('Begin PBXSourcesBuildPhase section'))
+ self.generate_pbx_sources_build_phase(objects_dict)
+ objects_dict.add_comment(PbxComment('End PBXSourcesBuildPhase section'))
+ objects_dict.add_comment(PbxComment('Begin PBXTargetDependency section'))
+ self.generate_pbx_target_dependency(objects_dict)
+ objects_dict.add_comment(PbxComment('End PBXTargetDependency section'))
+ objects_dict.add_comment(PbxComment('Begin XCBuildConfiguration section'))
+ self.generate_xc_build_configuration(objects_dict)
+ objects_dict.add_comment(PbxComment('End XCBuildConfiguration section'))
+ objects_dict.add_comment(PbxComment('Begin XCConfigurationList section'))
+ self.generate_xc_configurationList(objects_dict)
+ objects_dict.add_comment(PbxComment('End XCConfigurationList section'))
+ self.generate_suffix(self.top_level_dict)
+ self.write_pbxfile(self.top_level_dict, self.proj_file)
+ self.generate_regen_info()
+
+ def get_xcodetype(self, fname):
+ xcodetype = XCODETYPEMAP.get(fname.split('.')[-1].lower())
+ if not xcodetype:
+ xcodetype = 'sourcecode.unknown'
+ return xcodetype
+
+ def generate_filemap(self):
+ self.filemap = {} # Key is source file relative to src root.
+ self.target_filemap = {}
+ for name, t in self.build_targets.items():
+ for s in t.sources:
+ if isinstance(s, mesonlib.File):
+ s = os.path.join(s.subdir, s.fname)
+ self.filemap[s] = self.gen_id()
+ for o in t.objects:
+ if isinstance(o, str):
+ o = os.path.join(t.subdir, o)
+ self.filemap[o] = self.gen_id()
+ self.target_filemap[name] = self.gen_id()
+
+ def generate_buildstylemap(self):
+ self.buildstylemap = {self.buildtype: self.gen_id()}
+
+ def generate_build_phase_map(self):
+ for tname, t in self.build_targets.items():
+ # generate id for our own target-name
+ t.buildphasemap = {}
+ t.buildphasemap[tname] = self.gen_id()
+            # each target can have its own Frameworks/Sources/..., generate ids for those
+ t.buildphasemap['Frameworks'] = self.gen_id()
+ t.buildphasemap['Resources'] = self.gen_id()
+ t.buildphasemap['Sources'] = self.gen_id()
+
+ def generate_build_configuration_map(self):
+ self.buildconfmap = {}
+ for t in self.build_targets:
+ bconfs = {self.buildtype: self.gen_id()}
+ self.buildconfmap[t] = bconfs
+ for t in self.custom_targets:
+ bconfs = {self.buildtype: self.gen_id()}
+ self.buildconfmap[t] = bconfs
+
+ def generate_project_configurations_map(self):
+ self.project_configurations = {self.buildtype: self.gen_id()}
+
+ def generate_buildall_configurations_map(self):
+ self.buildall_configurations = {self.buildtype: self.gen_id()}
+
+ def generate_test_configurations_map(self):
+ self.test_configurations = {self.buildtype: self.gen_id()}
+
+ def generate_build_configurationlist_map(self):
+ self.buildconflistmap = {}
+ for t in self.build_targets:
+ self.buildconflistmap[t] = self.gen_id()
+ for t in self.custom_targets:
+ self.buildconflistmap[t] = self.gen_id()
+
+ def generate_native_target_map(self):
+ self.native_targets = {}
+ for t in self.build_targets:
+ self.native_targets[t] = self.gen_id()
+
+ def generate_custom_target_map(self):
+ self.shell_targets = {}
+ self.custom_target_output_buildfile = {}
+ self.custom_target_output_fileref = {}
+ for tname, t in self.custom_targets.items():
+ self.shell_targets[tname] = self.gen_id()
+ if not isinstance(t, build.CustomTarget):
+ continue
+ (srcs, ofilenames, cmd) = self.eval_custom_target_command(t)
+ for o in ofilenames:
+ self.custom_target_output_buildfile[o] = self.gen_id()
+ self.custom_target_output_fileref[o] = self.gen_id()
+
+ def generate_generator_target_map(self):
+ # Generator objects do not have natural unique ids
+ # so use a counter.
+ self.generator_fileref_ids = {}
+ self.generator_buildfile_ids = {}
+ for tname, t in self.build_targets.items():
+ generator_id = 0
+ for genlist in t.generated:
+ if not isinstance(genlist, build.GeneratedList):
+ continue
+ self.gen_single_target_map(genlist, tname, t, generator_id)
+ generator_id += 1
+ # FIXME add outputs.
+ for tname, t in self.custom_targets.items():
+ generator_id = 0
+ for genlist in t.sources:
+ if not isinstance(genlist, build.GeneratedList):
+ continue
+ self.gen_single_target_map(genlist, tname, t, generator_id)
+ generator_id += 1
+
+ def gen_single_target_map(self, genlist, tname, t, generator_id):
+ k = (tname, generator_id)
+ assert(k not in self.shell_targets)
+ self.shell_targets[k] = self.gen_id()
+ ofile_abs = []
+ for i in genlist.get_inputs():
+ for o_base in genlist.get_outputs_for(i):
+ o = os.path.join(self.get_target_private_dir(t), o_base)
+ ofile_abs.append(os.path.join(self.environment.get_build_dir(), o))
+ assert(k not in self.generator_outputs)
+ self.generator_outputs[k] = ofile_abs
+ buildfile_ids = []
+ fileref_ids = []
+ for i in range(len(ofile_abs)):
+ buildfile_ids.append(self.gen_id())
+ fileref_ids.append(self.gen_id())
+ self.generator_buildfile_ids[k] = buildfile_ids
+ self.generator_fileref_ids[k] = fileref_ids
+
+ def generate_native_frameworks_map(self):
+ self.native_frameworks = {}
+ self.native_frameworks_fileref = {}
+ for t in self.build_targets.values():
+ for dep in t.get_external_deps():
+ if isinstance(dep, dependencies.AppleFrameworks):
+ for f in dep.frameworks:
+ self.native_frameworks[f] = self.gen_id()
+ self.native_frameworks_fileref[f] = self.gen_id()
+
+ def generate_target_dependency_map(self):
+ self.target_dependency_map = {}
+ for tname, t in self.build_targets.items():
+ for target in t.link_targets:
+ if isinstance(target, build.CustomTargetIndex):
+ k = (tname, target.target.get_basename())
+ if k in self.target_dependency_map:
+ continue
+ else:
+ k = (tname, target.get_basename())
+ assert(k not in self.target_dependency_map)
+ self.target_dependency_map[k] = self.gen_id()
+ for tname, t in self.custom_targets.items():
+ k = tname
+ assert(k not in self.target_dependency_map)
+ self.target_dependency_map[k] = self.gen_id()
+
+ def generate_pbxdep_map(self):
+ self.pbx_dep_map = {}
+ self.pbx_custom_dep_map = {}
+ for t in self.build_targets:
+ self.pbx_dep_map[t] = self.gen_id()
+ for t in self.custom_targets:
+ self.pbx_custom_dep_map[t] = self.gen_id()
+
+ def generate_containerproxy_map(self):
+ self.containerproxy_map = {}
+ for t in self.build_targets:
+ self.containerproxy_map[t] = self.gen_id()
+
+ def generate_target_file_maps(self):
+ self.generate_target_file_maps_impl(self.build_targets)
+ self.generate_target_file_maps_impl(self.custom_targets)
+
+ def generate_target_file_maps_impl(self, targets):
+ for tname, t in targets.items():
+ for s in t.sources:
+ if isinstance(s, mesonlib.File):
+ s = os.path.join(s.subdir, s.fname)
+ if not isinstance(s, str):
+ continue
+ k = (tname, s)
+ assert(k not in self.buildfile_ids)
+ self.buildfile_ids[k] = self.gen_id()
+ assert(k not in self.fileref_ids)
+ self.fileref_ids[k] = self.gen_id()
+ if not hasattr(t, 'objects'):
+ continue
+ for o in t.objects:
+ if isinstance(o, build.ExtractedObjects):
+ # Extracted objects do not live in "the Xcode world".
+ continue
+ if isinstance(o, mesonlib.File):
+ o = os.path.join(o.subdir, o.fname)
+ if isinstance(o, str):
+ o = os.path.join(t.subdir, o)
+ k = (tname, o)
+ assert(k not in self.buildfile_ids)
+ self.buildfile_ids[k] = self.gen_id()
+ assert(k not in self.fileref_ids)
+ self.fileref_ids[k] = self.gen_id()
+ else:
+ raise RuntimeError('Unknown input type ' + str(o))
+
+ def generate_build_file_maps(self):
+ for buildfile in self.interpreter.get_build_def_files():
+ assert(isinstance(buildfile, str))
+ self.buildfile_ids[buildfile] = self.gen_id()
+ self.fileref_ids[buildfile] = self.gen_id()
+
+ def generate_source_phase_map(self):
+ self.source_phase = {}
+ for t in self.build_targets:
+ self.source_phase[t] = self.gen_id()
+
+ def generate_pbx_aggregate_target(self, objects_dict):
+ self.custom_aggregate_targets = {}
+ self.build_all_tdep_id = self.gen_id()
+ # FIXME: filter out targets that are not built by default.
+        target_dependencies = [self.pbx_dep_map[t] for t in self.build_targets]
+ custom_target_dependencies = [self.pbx_custom_dep_map[t] for t in self.custom_targets]
+ aggregated_targets = []
+ aggregated_targets.append((self.all_id, 'ALL_BUILD',
+ self.all_buildconf_id,
+ [],
+ [self.regen_dependency_id] + target_dependencies + custom_target_dependencies))
+ aggregated_targets.append((self.test_id,
+ 'RUN_TESTS',
+ self.test_buildconf_id,
+ [self.test_command_id],
+ [self.regen_dependency_id, self.build_all_tdep_id]))
+ aggregated_targets.append((self.regen_id,
+ 'REGENERATE',
+ self.regen_buildconf_id,
+ [self.regen_command_id],
+ []))
+ for tname, t in self.build.get_custom_targets().items():
+ ct_id = self.gen_id()
+ self.custom_aggregate_targets[tname] = ct_id
+ build_phases = []
+ dependencies = [self.regen_dependency_id]
+ generator_id = 0
+ for s in t.sources:
+ if not isinstance(s, build.GeneratedList):
+ continue
+ build_phases.append(self.shell_targets[(tname, generator_id)])
+ for d in s.depends:
+ dependencies.append(self.pbx_custom_dep_map[d.get_id()])
+ generator_id += 1
+ build_phases.append(self.shell_targets[tname])
+ aggregated_targets.append((ct_id, tname, self.buildconflistmap[tname], build_phases, dependencies))
+
+ # Sort objects by ID before writing
+ sorted_aggregated_targets = sorted(aggregated_targets, key=operator.itemgetter(0))
+ for t in sorted_aggregated_targets:
+ agt_dict = PbxDict()
+ name = t[1]
+ buildconf_id = t[2]
+ build_phases = t[3]
+ dependencies = t[4]
+ agt_dict.add_item('isa', 'PBXAggregateTarget')
+ agt_dict.add_item('buildConfigurationList', buildconf_id, f'Build configuration list for PBXAggregateTarget "{name}"')
+ bp_arr = PbxArray()
+ agt_dict.add_item('buildPhases', bp_arr)
+ for bp in build_phases:
+ bp_arr.add_item(bp, 'ShellScript')
+ dep_arr = PbxArray()
+ agt_dict.add_item('dependencies', dep_arr)
+ for td in dependencies:
+ dep_arr.add_item(td, 'PBXTargetDependency')
+ agt_dict.add_item('name', f'"{name}"')
+ agt_dict.add_item('productName', f'"{name}"')
+ objects_dict.add_item(t[0], agt_dict, name)
+
+ def generate_pbx_build_file(self, objects_dict):
+ for tname, t in self.build_targets.items():
+ for dep in t.get_external_deps():
+ if isinstance(dep, dependencies.AppleFrameworks):
+ for f in dep.frameworks:
+ fw_dict = PbxDict()
+ objects_dict.add_item(self.native_frameworks[f], fw_dict, f'{f}.framework in Frameworks')
+ fw_dict.add_item('isa', 'PBXBuildFile')
+ fw_dict.add_item('fileRef', self.native_frameworks_fileref[f], f)
+
+ for s in t.sources:
+ in_build_dir = False
+ if isinstance(s, mesonlib.File):
+ if s.is_built:
+ in_build_dir = True
+ s = os.path.join(s.subdir, s.fname)
+
+ if not isinstance(s, str):
+ continue
+ sdict = PbxDict()
+ k = (tname, s)
+ idval = self.buildfile_ids[k]
+ fileref = self.fileref_ids[k]
+ if in_build_dir:
+ fullpath = os.path.join(self.environment.get_build_dir(), s)
+ else:
+ fullpath = os.path.join(self.environment.get_source_dir(), s)
+ compiler_args = ''
+ sdict.add_item('isa', 'PBXBuildFile')
+ sdict.add_item('fileRef', fileref, fullpath)
+ objects_dict.add_item(idval, sdict)
+
+ for o in t.objects:
+ if isinstance(o, build.ExtractedObjects):
+ # Object files are not source files as such. We add them
+ # by hand in linker flags. It is also not particularly
+ # clear how to define build files in Xcode's file format.
+ continue
+ if isinstance(o, mesonlib.File):
+ o = os.path.join(o.subdir, o.fname)
+ elif isinstance(o, str):
+ o = os.path.join(t.subdir, o)
+ idval = self.buildfile_ids[(tname, o)]
+ k = (tname, o)
+ fileref = self.fileref_ids[k]
+ assert(o not in self.filemap)
+ self.filemap[o] = idval
+ fullpath = os.path.join(self.environment.get_source_dir(), o)
+ fullpath2 = fullpath
+ o_dict = PbxDict()
+ objects_dict.add_item(idval, o_dict, fullpath)
+ o_dict.add_item('isa', 'PBXBuildFile')
+ o_dict.add_item('fileRef', fileref, fullpath2)
+
+ generator_id = 0
+ for g in t.generated:
+ if not isinstance(g, build.GeneratedList):
+ continue
+ self.create_generator_shellphase(objects_dict, tname, generator_id)
+ generator_id += 1
+
+ # Custom targets are shell build phases in Xcode terminology.
+ for tname, t in self.custom_targets.items():
+ if not isinstance(t, build.CustomTarget):
+ continue
+ (srcs, ofilenames, cmd) = self.eval_custom_target_command(t)
+ for o in ofilenames:
+ custom_dict = PbxDict()
+ objects_dict.add_item(self.custom_target_output_buildfile[o], custom_dict, f'/* {o} */')
+ custom_dict.add_item('isa', 'PBXBuildFile')
+ custom_dict.add_item('fileRef', self.custom_target_output_fileref[o])
+ generator_id = 0
+ for g in t.sources:
+ if not isinstance(g, build.GeneratedList):
+ continue
+ self.create_generator_shellphase(objects_dict, tname, generator_id)
+ generator_id += 1
+
+ def create_generator_shellphase(self, objects_dict, tname, generator_id):
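+        # One PBXBuildFile entry per generator output, each wired to the file
+        # reference id allocated in gen_single_target_map.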
+ file_ids = self.generator_buildfile_ids[(tname, generator_id)]
+ ref_ids = self.generator_fileref_ids[(tname, generator_id)]
+ assert(len(ref_ids) == len(file_ids))
+ for i in range(len(file_ids)):
+ file_o = file_ids[i]
+ ref_id = ref_ids[i]
+ odict = PbxDict()
+ objects_dict.add_item(file_o, odict)
+ odict.add_item('isa', 'PBXBuildFile')
+ odict.add_item('fileRef', ref_id)
+
+ def generate_pbx_build_style(self, objects_dict):
+        # FIXME: Xcode 9 and later do not use PBXBuildStyle and it gets removed. Maybe we can remove this part.
+ for name, idval in self.buildstylemap.items():
+ styledict = PbxDict()
+ objects_dict.add_item(idval, styledict, name)
+ styledict.add_item('isa', 'PBXBuildStyle')
+ settings_dict = PbxDict()
+ styledict.add_item('buildSettings', settings_dict)
+ settings_dict.add_item('COPY_PHASE_STRIP', 'NO')
+ styledict.add_item('name', f'"{name}"')
+
+ def generate_pbx_container_item_proxy(self, objects_dict):
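+        # Container item proxies are the indirection Xcode uses so that target
+        # dependencies can point at targets inside the same project object.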
+ for t in self.build_targets:
+ proxy_dict = PbxDict()
+ objects_dict.add_item(self.containerproxy_map[t], proxy_dict, 'PBXContainerItemProxy')
+ proxy_dict.add_item('isa', 'PBXContainerItemProxy')
+ proxy_dict.add_item('containerPortal', self.project_uid, 'Project object')
+ proxy_dict.add_item('proxyType', '1')
+ proxy_dict.add_item('remoteGlobalIDString', self.native_targets[t])
+ proxy_dict.add_item('remoteInfo', '"' + t + '"')
+
+ def generate_pbx_file_reference(self, objects_dict):
+ for tname, t in self.build_targets.items():
+ for dep in t.get_external_deps():
+ if isinstance(dep, dependencies.AppleFrameworks):
+ for f in dep.frameworks:
+ fw_dict = PbxDict()
+ objects_dict.add_item(self.native_frameworks_fileref[f], fw_dict, f)
+ fw_dict.add_item('isa', 'PBXFileReference')
+ fw_dict.add_item('lastKnownFileType', 'wrapper.framework')
+ fw_dict.add_item('name', f'{f}.framework')
+ fw_dict.add_item('path', f'System/Library/Frameworks/{f}.framework')
+ fw_dict.add_item('sourceTree', 'SDKROOT')
+ for s in t.sources:
+ in_build_dir = False
+ if isinstance(s, mesonlib.File):
+ if s.is_built:
+ in_build_dir = True
+ s = os.path.join(s.subdir, s.fname)
+ if not isinstance(s, str):
+ continue
+ idval = self.fileref_ids[(tname, s)]
+ fullpath = os.path.join(self.environment.get_source_dir(), s)
+ src_dict = PbxDict()
+ xcodetype = self.get_xcodetype(s)
+ name = os.path.basename(s)
+ path = s
+ objects_dict.add_item(idval, src_dict, fullpath)
+ src_dict.add_item('isa', 'PBXFileReference')
+ src_dict.add_item('explicitFileType', '"' + xcodetype + '"')
+ src_dict.add_item('fileEncoding', '4')
+ if in_build_dir:
+ src_dict.add_item('name', '"' + name + '"')
+ # This makes no sense. This should say path instead of name
+ # but then the path gets added twice.
+ src_dict.add_item('path', '"' + name + '"')
+ src_dict.add_item('sourceTree', 'BUILD_ROOT')
+ else:
+ src_dict.add_item('name', '"' + name + '"')
+ src_dict.add_item('path', '"' + path + '"')
+ src_dict.add_item('sourceTree', 'SOURCE_ROOT')
+
+ generator_id = 0
+ for g in t.generated:
+ if not isinstance(g, build.GeneratedList):
+ continue
+ outputs = self.generator_outputs[(tname, generator_id)]
+ ref_ids = self.generator_fileref_ids[tname, generator_id]
+ assert(len(ref_ids) == len(outputs))
+ for i in range(len(outputs)):
+ o = outputs[i]
+ ref_id = ref_ids[i]
+ odict = PbxDict()
+ name = os.path.basename(o)
+ objects_dict.add_item(ref_id, odict, o)
+ xcodetype = self.get_xcodetype(o)
+ rel_name = mesonlib.relpath(o, self.environment.get_source_dir())
+ odict.add_item('isa', 'PBXFileReference')
+ odict.add_item('explicitFileType', '"' + xcodetype + '"')
+ odict.add_item('fileEncoding', '4')
+ odict.add_item('name', f'"{name}"')
+ odict.add_item('path', f'"{rel_name}"')
+ odict.add_item('sourceTree', 'SOURCE_ROOT')
+
+ generator_id += 1
+
+ for o in t.objects:
+ if isinstance(o, build.ExtractedObjects):
+ # Same as with pbxbuildfile.
+ continue
+ if isinstance(o, mesonlib.File):
+ fullpath = o.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir())
+ o = os.path.join(o.subdir, o.fname)
+ else:
+ o = os.path.join(t.subdir, o)
+ fullpath = os.path.join(self.environment.get_source_dir(), o)
+ idval = self.fileref_ids[(tname, o)]
+ rel_name = mesonlib.relpath(fullpath, self.environment.get_source_dir())
+ o_dict = PbxDict()
+ name = os.path.basename(o)
+ objects_dict.add_item(idval, o_dict, fullpath)
+ o_dict.add_item('isa', 'PBXFileReference')
+ o_dict.add_item('explicitFileType', '"' + self.get_xcodetype(o) + '"')
+ o_dict.add_item('fileEncoding', '4')
+ o_dict.add_item('name', f'"{name}"')
+ o_dict.add_item('path', f'"{rel_name}"')
+ o_dict.add_item('sourceTree', 'SOURCE_ROOT')
+ for tname, idval in self.target_filemap.items():
+ target_dict = PbxDict()
+ objects_dict.add_item(idval, target_dict, tname)
+ t = self.build_targets[tname]
+ fname = t.get_filename()
+ reftype = 0
+ if isinstance(t, build.Executable):
+ typestr = 'compiled.mach-o.executable'
+ path = fname
+ elif isinstance(t, build.SharedLibrary):
+ typestr = self.get_xcodetype('dummy.dylib')
+ path = fname
+ else:
+ typestr = self.get_xcodetype(fname)
+ path = '"%s"' % t.get_filename()
+ target_dict.add_item('isa', 'PBXFileReference')
+ target_dict.add_item('explicitFileType', '"' + typestr + '"')
+ if ' ' in path and path[0] != '"':
+ target_dict.add_item('path', f'"{path}"')
+ else:
+ target_dict.add_item('path', path)
+ target_dict.add_item('refType', reftype)
+ target_dict.add_item('sourceTree', 'BUILT_PRODUCTS_DIR')
+
+ for tname, t in self.custom_targets.items():
+ if not isinstance(t, build.CustomTarget):
+ continue
+ (srcs, ofilenames, cmd) = self.eval_custom_target_command(t)
+ for s in t.sources:
+ if isinstance(s, mesonlib.File):
+ s = os.path.join(s.subdir, s.fname)
+ elif isinstance(s, str):
+                s = os.path.join(t.subdir, s)
+ else:
+ continue
+ custom_dict = PbxDict()
+ typestr = self.get_xcodetype(s)
+ custom_dict.add_item('isa', 'PBXFileReference')
+ custom_dict.add_item('explicitFileType', '"' + typestr + '"')
+ custom_dict.add_item('name', f'"{s}"')
+ custom_dict.add_item('path', f'"{s}"')
+ custom_dict.add_item('refType', 0)
+ custom_dict.add_item('sourceTree', 'SOURCE_ROOT')
+ objects_dict.add_item(self.fileref_ids[(tname, s)], custom_dict)
+ for o in ofilenames:
+ custom_dict = PbxDict()
+ typestr = self.get_xcodetype(o)
+ custom_dict.add_item('isa', 'PBXFileReference')
+ custom_dict.add_item('explicitFileType', '"' + typestr + '"')
+ custom_dict.add_item('name', o)
+ custom_dict.add_item('path', os.path.join(self.src_to_build, o))
+ custom_dict.add_item('refType', 0)
+ custom_dict.add_item('sourceTree', 'SOURCE_ROOT')
+ objects_dict.add_item(self.custom_target_output_fileref[o], custom_dict)
+
+ for buildfile in self.interpreter.get_build_def_files():
+ basename = os.path.split(buildfile)[1]
+ buildfile_dict = PbxDict()
+ typestr = self.get_xcodetype(buildfile)
+ buildfile_dict.add_item('isa', 'PBXFileReference')
+ buildfile_dict.add_item('explicitFileType', '"' + typestr + '"')
+ buildfile_dict.add_item('name', f'"{basename}"')
+ buildfile_dict.add_item('path', f'"{buildfile}"')
+ buildfile_dict.add_item('refType', 0)
+ buildfile_dict.add_item('sourceTree', 'SOURCE_ROOT')
+ objects_dict.add_item(self.fileref_ids[buildfile], buildfile_dict)
+
+ def generate_pbx_frameworks_buildphase(self, objects_dict):
+ for t in self.build_targets.values():
+ bt_dict = PbxDict()
+ objects_dict.add_item(t.buildphasemap['Frameworks'], bt_dict, 'Frameworks')
+ bt_dict.add_item('isa', 'PBXFrameworksBuildPhase')
+ bt_dict.add_item('buildActionMask', 2147483647)
+ file_list = PbxArray()
+ bt_dict.add_item('files', file_list)
+ for dep in t.get_external_deps():
+ if isinstance(dep, dependencies.AppleFrameworks):
+ for f in dep.frameworks:
+ file_list.add_item(self.native_frameworks[f], f'{f}.framework in Frameworks')
+ bt_dict.add_item('runOnlyForDeploymentPostprocessing', 0)
+
+ def generate_pbx_group(self, objects_dict):
+ groupmap = {}
+ target_src_map = {}
+ for t in self.build_targets:
+ groupmap[t] = self.gen_id()
+ target_src_map[t] = self.gen_id()
+ for t in self.custom_targets:
+ groupmap[t] = self.gen_id()
+ target_src_map[t] = self.gen_id()
+ projecttree_id = self.gen_id()
+ resources_id = self.gen_id()
+ products_id = self.gen_id()
+ frameworks_id = self.gen_id()
+ main_dict = PbxDict()
+ objects_dict.add_item(self.maingroup_id, main_dict)
+ main_dict.add_item('isa', 'PBXGroup')
+ main_children = PbxArray()
+ main_dict.add_item('children', main_children)
+ main_children.add_item(projecttree_id, 'Project tree')
+ main_children.add_item(resources_id, 'Resources')
+ main_children.add_item(products_id, 'Products')
+ main_children.add_item(frameworks_id, 'Frameworks')
+ main_dict.add_item('sourceTree', '"<group>"')
+
+ self.add_projecttree(objects_dict, projecttree_id)
+
+ resource_dict = PbxDict()
+ objects_dict.add_item(resources_id, resource_dict, 'Resources')
+ resource_dict.add_item('isa', 'PBXGroup')
+ resource_children = PbxArray()
+ resource_dict.add_item('children', resource_children)
+ resource_dict.add_item('name', 'Resources')
+ resource_dict.add_item('sourceTree', '"<group>"')
+
+ frameworks_dict = PbxDict()
+ objects_dict.add_item(frameworks_id, frameworks_dict, 'Frameworks')
+ frameworks_dict.add_item('isa', 'PBXGroup')
+ frameworks_children = PbxArray()
+ frameworks_dict.add_item('children', frameworks_children)
+ # write frameworks
+
+ for t in self.build_targets.values():
+ for dep in t.get_external_deps():
+ if isinstance(dep, dependencies.AppleFrameworks):
+ for f in dep.frameworks:
+ frameworks_children.add_item(self.native_frameworks_fileref[f], f)
+
+ frameworks_dict.add_item('name', 'Frameworks')
+ frameworks_dict.add_item('sourceTree', '"<group>"')
+
+ for tname, t in self.custom_targets.items():
+ target_dict = PbxDict()
+ objects_dict.add_item(groupmap[tname], target_dict, tname)
+ target_dict.add_item('isa', 'PBXGroup')
+ target_children = PbxArray()
+ target_dict.add_item('children', target_children)
+ target_children.add_item(target_src_map[tname], 'Source files')
+ if t.subproject:
+ target_dict.add_item('name', f'"{t.subproject} • {t.name}"')
+ else:
+ target_dict.add_item('name', f'"{t.name}"')
+ target_dict.add_item('sourceTree', '"<group>"')
+ source_files_dict = PbxDict()
+ objects_dict.add_item(target_src_map[tname], source_files_dict, 'Source files')
+ source_files_dict.add_item('isa', 'PBXGroup')
+ source_file_children = PbxArray()
+ source_files_dict.add_item('children', source_file_children)
+ for s in t.sources:
+ if isinstance(s, mesonlib.File):
+ s = os.path.join(s.subdir, s.fname)
+ elif isinstance(s, str):
+                    s = os.path.join(t.subdir, s)
+ else:
+ continue
+ source_file_children.add_item(self.fileref_ids[(tname, s)], s)
+ source_files_dict.add_item('name', '"Source files"')
+ source_files_dict.add_item('sourceTree', '"<group>"')
+
+ # And finally products
+ product_dict = PbxDict()
+ objects_dict.add_item(products_id, product_dict, 'Products')
+ product_dict.add_item('isa', 'PBXGroup')
+ product_children = PbxArray()
+ product_dict.add_item('children', product_children)
+ for t in self.build_targets:
+ product_children.add_item(self.target_filemap[t], t)
+ product_dict.add_item('name', 'Products')
+ product_dict.add_item('sourceTree', '"<group>"')
+
+ def write_group_target_entry(self, objects_dict, t):
+ tid = t.get_id()
+ group_id = self.gen_id()
+ target_dict = PbxDict()
+ objects_dict.add_item(group_id, target_dict, tid)
+ target_dict.add_item('isa', 'PBXGroup')
+ target_children = PbxArray()
+ target_dict.add_item('children', target_children)
+ target_dict.add_item('name', f'"{t} · target"')
+ target_dict.add_item('sourceTree', '"<group>"')
+ source_files_dict = PbxDict()
+ for s in t.sources:
+ if isinstance(s, mesonlib.File):
+ s = os.path.join(s.subdir, s.fname)
+ elif isinstance(s, str):
+                s = os.path.join(t.subdir, s)
+ else:
+ continue
+ target_children.add_item(self.fileref_ids[(tid, s)], s)
+ for o in t.objects:
+ if isinstance(o, build.ExtractedObjects):
+ # Do not show built object files in the project tree.
+ continue
+ if isinstance(o, mesonlib.File):
+ o = os.path.join(o.subdir, o.fname)
+ else:
+ o = os.path.join(t.subdir, o)
+ target_children.add_item(self.fileref_ids[(tid, o)], o)
+ source_files_dict.add_item('name', '"Source files"')
+ source_files_dict.add_item('sourceTree', '"<group>"')
+ return group_id
+
+ def add_projecttree(self, objects_dict, projecttree_id):
+ root_dict = PbxDict()
+ objects_dict.add_item(projecttree_id, root_dict, "Root of project tree")
+ root_dict.add_item('isa', 'PBXGroup')
+ target_children = PbxArray()
+ root_dict.add_item('children', target_children)
+ root_dict.add_item('name', '"Project root"')
+ root_dict.add_item('sourceTree', '"<group>"')
+
+ project_tree = self.generate_project_tree()
+ self.write_tree(objects_dict, project_tree, target_children, '')
+
+ def write_tree(self, objects_dict, tree_node, children_array, current_subdir):
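+        # Recursively emit one PBXGroup per source subdirectory, adding the
+        # per-target groups and any meson build files found at each level.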
+        for subdir_name, subdir_node in tree_node.subdirs.items():
+            # Create a fresh dict and child array per subdirectory; sharing one
+            # across iterations would trip PbxDict's duplicate-key assertion.
+            subdir_dict = PbxDict()
+            subdir_children = PbxArray()
+            subdir_id = self.gen_id()
+            objects_dict.add_item(subdir_id, subdir_dict)
+            children_array.add_item(subdir_id)
+            subdir_dict.add_item('isa', 'PBXGroup')
+            subdir_dict.add_item('children', subdir_children)
+            subdir_dict.add_item('name', f'"{subdir_name}"')
+            subdir_dict.add_item('sourceTree', '"<group>"')
+            self.write_tree(objects_dict, subdir_node, subdir_children, os.path.join(current_subdir, subdir_name))
+ for target in tree_node.targets:
+ group_id = self.write_group_target_entry(objects_dict, target)
+ children_array.add_item(group_id)
+ potentials = [os.path.join(current_subdir, 'meson.build'),
+ os.path.join(current_subdir, 'meson_options.txt')]
+ for bf in potentials:
+ i = self.fileref_ids.get(bf, None)
+ if i:
+ children_array.add_item(i)
+
+ def generate_project_tree(self):
+ tree_info = FileTreeEntry()
+ for tname, t in self.build_targets.items():
+ self.add_target_to_tree(tree_info, t)
+ return tree_info
+
+ def add_target_to_tree(self, tree_root, t):
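+        # Walk the target's subdir path segment by segment, creating tree
+        # nodes as needed, and register the target at the leaf directory.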
+ current_node = tree_root
+ path_segments = t.subdir.split('/')
+ for s in path_segments:
+ if not s:
+ continue
+ if s not in current_node.subdirs:
+ current_node.subdirs[s] = FileTreeEntry()
+ current_node = current_node.subdirs[s]
+ current_node.targets.append(t)
+
+ def generate_pbx_native_target(self, objects_dict):
+ for tname, idval in self.native_targets.items():
+ ntarget_dict = PbxDict()
+ t = self.build_targets[tname]
+ objects_dict.add_item(idval, ntarget_dict, tname)
+ ntarget_dict.add_item('isa', 'PBXNativeTarget')
+ ntarget_dict.add_item('buildConfigurationList', self.buildconflistmap[tname], f'Build configuration list for PBXNativeTarget "{tname}"')
+ buildphases_array = PbxArray()
+ ntarget_dict.add_item('buildPhases', buildphases_array)
+ generator_id = 0
+ for g in t.generated:
+                # Custom targets are handled via inter-target dependencies.
+ # Generators are built as a shellscriptbuildphase.
+ if isinstance(g, build.GeneratedList):
+ buildphases_array.add_item(self.shell_targets[(tname, generator_id)], f'Generator {generator_id}/{tname}')
+ generator_id += 1
+ for bpname, bpval in t.buildphasemap.items():
+ buildphases_array.add_item(bpval, f'{bpname} yyy')
+ ntarget_dict.add_item('buildRules', PbxArray())
+ dep_array = PbxArray()
+ ntarget_dict.add_item('dependencies', dep_array)
+ dep_array.add_item(self.regen_dependency_id)
+ # These dependencies only tell Xcode that the deps must be built
+ # before this one. They don't set up linkage or anything
+ # like that. Those are set up in the XCBuildConfiguration.
+ for lt in self.build_targets[tname].link_targets:
+ # NOT DOCUMENTED, may need to make different links
+ # to same target have different targetdependency item.
+ if isinstance(lt, build.CustomTarget):
+ dep_array.add_item(self.pbx_custom_dep_map[lt.get_id()], lt.name)
+ elif isinstance(lt, build.CustomTargetIndex):
+ dep_array.add_item(self.pbx_custom_dep_map[lt.target.get_id()], lt.target.name)
+ else:
+ idval = self.pbx_dep_map[lt.get_id()]
+ dep_array.add_item(idval, 'PBXTargetDependency')
+ for o in t.objects:
+ if isinstance(o, build.ExtractedObjects):
+ source_target_id = o.target.get_id()
+ idval = self.pbx_dep_map[source_target_id]
+ dep_array.add_item(idval, 'PBXTargetDependency')
+ generator_id = 0
+ for o in t.generated:
+ if isinstance(o, build.CustomTarget):
+ dep_array.add_item(self.pbx_custom_dep_map[o.get_id()], o.name)
+ elif isinstance(o, build.CustomTargetIndex):
+ dep_array.add_item(self.pbx_custom_dep_map[o.target.get_id()], o.target.name)
+
+ generator_id += 1
+
+ ntarget_dict.add_item('name', f'"{tname}"')
+ ntarget_dict.add_item('productName', f'"{tname}"')
+ ntarget_dict.add_item('productReference', self.target_filemap[tname], tname)
+ if isinstance(t, build.Executable):
+ typestr = 'com.apple.product-type.tool'
+ elif isinstance(t, build.StaticLibrary):
+ typestr = 'com.apple.product-type.library.static'
+ elif isinstance(t, build.SharedLibrary):
+ typestr = 'com.apple.product-type.library.dynamic'
+ else:
+            raise MesonException(f'Unknown target type for {tname}')
+ ntarget_dict.add_item('productType', f'"{typestr}"')
+
+ def generate_pbx_project(self, objects_dict):
+ project_dict = PbxDict()
+ objects_dict.add_item(self.project_uid, project_dict, 'Project object')
+ project_dict.add_item('isa', 'PBXProject')
+ attr_dict = PbxDict()
+ project_dict.add_item('attributes', attr_dict)
+ attr_dict.add_item('BuildIndependentTargetsInParallel', 'YES')
+ project_dict.add_item('buildConfigurationList', self.project_conflist, f'Build configuration list for PBXProject "{self.build.project_name}"')
+ project_dict.add_item('buildSettings', PbxDict())
+ style_arr = PbxArray()
+ project_dict.add_item('buildStyles', style_arr)
+ for name, idval in self.buildstylemap.items():
+ style_arr.add_item(idval, name)
+ project_dict.add_item('compatibilityVersion', '"Xcode 3.2"')
+ project_dict.add_item('hasScannedForEncodings', 0)
+ project_dict.add_item('mainGroup', self.maingroup_id)
+ project_dict.add_item('projectDirPath', '"' + self.environment.get_source_dir() + '"')
+ project_dict.add_item('projectRoot', '""')
+ targets_arr = PbxArray()
+ project_dict.add_item('targets', targets_arr)
+ targets_arr.add_item(self.all_id, 'ALL_BUILD')
+ targets_arr.add_item(self.test_id, 'RUN_TESTS')
+ targets_arr.add_item(self.regen_id, 'REGENERATE')
+ for t in self.build_targets:
+ targets_arr.add_item(self.native_targets[t], t)
+ for t in self.custom_targets:
+ targets_arr.add_item(self.custom_aggregate_targets[t], t)
+
+ def generate_pbx_shell_build_phase(self, objects_dict):
+ self.generate_test_shell_build_phase(objects_dict)
+ self.generate_regen_shell_build_phase(objects_dict)
+ self.generate_custom_target_shell_build_phases(objects_dict)
+ self.generate_generator_target_shell_build_phases(objects_dict)
+
+ def generate_test_shell_build_phase(self, objects_dict):
+ shell_dict = PbxDict()
+ objects_dict.add_item(self.test_command_id, shell_dict, 'ShellScript')
+ shell_dict.add_item('isa', 'PBXShellScriptBuildPhase')
+ shell_dict.add_item('buildActionMask', 2147483647)
+ shell_dict.add_item('files', PbxArray())
+ shell_dict.add_item('inputPaths', PbxArray())
+ shell_dict.add_item('outputPaths', PbxArray())
+ shell_dict.add_item('runOnlyForDeploymentPostprocessing', 0)
+ shell_dict.add_item('shellPath', '/bin/sh')
+ cmd = mesonlib.get_meson_command() + ['test', '--no-rebuild', '-C', self.environment.get_build_dir()]
+ cmdstr = ' '.join(["'%s'" % i for i in cmd])
+ shell_dict.add_item('shellScript', f'"{cmdstr}"')
+ shell_dict.add_item('showEnvVarsInLog', 0)
+
+ def generate_regen_shell_build_phase(self, objects_dict):
+ shell_dict = PbxDict()
+ objects_dict.add_item(self.regen_command_id, shell_dict, 'ShellScript')
+ shell_dict.add_item('isa', 'PBXShellScriptBuildPhase')
+ shell_dict.add_item('buildActionMask', 2147483647)
+ shell_dict.add_item('files', PbxArray())
+ shell_dict.add_item('inputPaths', PbxArray())
+ shell_dict.add_item('outputPaths', PbxArray())
+ shell_dict.add_item('runOnlyForDeploymentPostprocessing', 0)
+ shell_dict.add_item('shellPath', '/bin/sh')
+ cmd = mesonlib.get_meson_command() + ['--internal', 'regencheck', os.path.join(self.environment.get_build_dir(), 'meson-private')]
+ cmdstr = ' '.join(["'%s'" % i for i in cmd])
+ shell_dict.add_item('shellScript', f'"{cmdstr}"')
+ shell_dict.add_item('showEnvVarsInLog', 0)
+
+ def generate_custom_target_shell_build_phases(self, objects_dict):
+ # Custom targets are shell build phases in Xcode terminology.
+ for tname, t in self.custom_targets.items():
+ if not isinstance(t, build.CustomTarget):
+ continue
+ (srcs, ofilenames, cmd) = self.eval_custom_target_command(t, absolute_outputs=True)
+ fixed_cmd, _ = self.as_meson_exe_cmdline(t.name,
+ cmd[0],
+ cmd[1:],
+ #workdir=None,
+ env=t.env)
+ custom_dict = PbxDict()
+ objects_dict.add_item(self.shell_targets[tname], custom_dict, f'/* Custom target {tname} */')
+ custom_dict.add_item('isa', 'PBXShellScriptBuildPhase')
+ custom_dict.add_item('buildActionMask', 2147483647)
+ custom_dict.add_item('files', PbxArray())
+ custom_dict.add_item('inputPaths', PbxArray())
+ outarray = PbxArray()
+ custom_dict.add_item('name', '"Generate {}."'.format(ofilenames[0]))
+ custom_dict.add_item('outputPaths', outarray)
+ for o in ofilenames:
+ outarray.add_item(os.path.join(self.environment.get_build_dir(), o))
+ custom_dict.add_item('runOnlyForDeploymentPostprocessing', 0)
+ custom_dict.add_item('shellPath', '/bin/sh')
+ workdir = self.environment.get_build_dir()
+ quoted_cmd = []
+ for c in fixed_cmd:
+ quoted_cmd.append(c.replace('"', chr(92) + '"'))
+ cmdstr = ' '.join([f"\\'{x}\\'" for x in quoted_cmd])
+ custom_dict.add_item('shellScript', f'"cd {workdir}; {cmdstr}"')
+ custom_dict.add_item('showEnvVarsInLog', 0)
+
+ def generate_generator_target_shell_build_phases(self, objects_dict):
+ for tname, t in self.build_targets.items():
+ generator_id = 0
+ for genlist in t.generated:
+ if isinstance(genlist, build.GeneratedList):
+ self.generate_single_generator_phase(tname, t, genlist, generator_id, objects_dict)
+ generator_id += 1
+ for tname, t in self.custom_targets.items():
+ generator_id = 0
+ for genlist in t.sources:
+ if isinstance(genlist, build.GeneratedList):
+ self.generate_single_generator_phase(tname, t, genlist, generator_id, objects_dict)
+ generator_id += 1
+
+ def generate_single_generator_phase(self, tname, t, genlist, generator_id, objects_dict):
+ generator = genlist.get_generator()
+ exe = generator.get_exe()
+ exe_arr = self.build_target_to_cmd_array(exe)
+ workdir = self.environment.get_build_dir()
+ gen_dict = PbxDict()
+ objects_dict.add_item(self.shell_targets[(tname, generator_id)], gen_dict, f'"Generator {generator_id}/{tname}"')
+ infilelist = genlist.get_inputs()
+ outfilelist = genlist.get_outputs()
+ gen_dict.add_item('isa', 'PBXShellScriptBuildPhase')
+ gen_dict.add_item('buildActionMask', 2147483647)
+ gen_dict.add_item('files', PbxArray())
+ gen_dict.add_item('inputPaths', PbxArray())
+ gen_dict.add_item('name', f'"Generator {generator_id}/{tname}"')
+ commands = [["cd", workdir]] # Array of arrays, each one a single command, will get concatenated below.
+ k = (tname, generator_id)
+ ofile_abs = self.generator_outputs[k]
+ outarray = PbxArray()
+ gen_dict.add_item('outputPaths', outarray)
+ for of in ofile_abs:
+ outarray.add_item(of)
+ for i in infilelist:
+            # This might need to be added to inputPaths. It is not done yet, as
+            # it is unclear whether it is necessary, what actually happens when
+            # it is defined, and the build currently works without it.
+ #infile_abs = i.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir())
+ infilename = i.rel_to_builddir(self.build_to_src)
+ base_args = generator.get_arglist(infilename)
+ for o_base in genlist.get_outputs_for(i):
+ o = os.path.join(self.get_target_private_dir(t), o_base)
+ args = []
+ for arg in base_args:
+ arg = arg.replace("@INPUT@", infilename)
+ arg = arg.replace('@OUTPUT@', o).replace('@BUILD_DIR@', self.get_target_private_dir(t))
+ arg = arg.replace("@CURRENT_SOURCE_DIR@", os.path.join(self.build_to_src, t.subdir))
+ args.append(arg)
+ args = self.replace_outputs(args, self.get_target_private_dir(t), outfilelist)
+ args = self.replace_extra_args(args, genlist)
+ if generator.capture:
+ # When capturing, stdout is the output. Forward it with the shell.
+ full_command = ['('] + exe_arr + args + ['>', o, ')']
+ else:
+ full_command = exe_arr + args
+ commands.append(full_command)
+ gen_dict.add_item('runOnlyForDeploymentPostprocessing', 0)
+ gen_dict.add_item('shellPath', '/bin/sh')
+ quoted_cmds = []
+ for cmnd in commands:
+ q = []
+ for c in cmnd:
+ if ' ' in c:
+ q.append(f'\\"{c}\\"')
+ else:
+ q.append(c)
+ quoted_cmds.append(' '.join(q))
+ cmdstr = '"' + ' && '.join(quoted_cmds) + '"'
+ gen_dict.add_item('shellScript', cmdstr)
+ gen_dict.add_item('showEnvVarsInLog', 0)
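+        # For reference, the assembled shellScript for a capturing generator
+        # looks roughly like this (hypothetical program and paths):
+        #   "cd /path/to/builddir && ( mygen input.txt > tgt@sta/out.c )"
+        # i.e. one leading 'cd' plus one command per generated output, joined
+        # with ' && '; arguments containing spaces are wrapped in \" quotes.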
+
+
+ def generate_pbx_sources_build_phase(self, objects_dict):
+ for name in self.source_phase.keys():
+ phase_dict = PbxDict()
+ t = self.build_targets[name]
+ objects_dict.add_item(t.buildphasemap[name], phase_dict, 'Sources')
+ phase_dict.add_item('isa', 'PBXSourcesBuildPhase')
+ phase_dict.add_item('buildActionMask', 2147483647)
+ file_arr = PbxArray()
+ phase_dict.add_item('files', file_arr)
+ for s in self.build_targets[name].sources:
+ s = os.path.join(s.subdir, s.fname)
+ if not self.environment.is_header(s):
+ file_arr.add_item(self.buildfile_ids[(name, s)], os.path.join(self.environment.get_source_dir(), s))
+ generator_id = 0
+ for gt in t.generated:
+ if isinstance(gt, build.CustomTarget):
+ (srcs, ofilenames, cmd) = self.eval_custom_target_command(gt)
+ for o in ofilenames:
+ file_arr.add_item(self.custom_target_output_buildfile[o],
+ os.path.join(self.environment.get_build_dir(), o))
+ elif isinstance(gt, build.CustomTargetIndex):
+ for o in gt.get_outputs():
+ file_arr.add_item(self.custom_target_output_buildfile[o],
+ os.path.join(self.environment.get_build_dir(), o))
+ elif isinstance(gt, build.GeneratedList):
+ genfiles = self.generator_buildfile_ids[(name, generator_id)]
+ generator_id += 1
+ for o in genfiles:
+ file_arr.add_item(o)
+ else:
+ raise RuntimeError('Unknown input type: ' + str(gt))
+ phase_dict.add_item('runOnlyForDeploymentPostprocessing', 0)
+
+ def generate_pbx_target_dependency(self, objects_dict):
+ all_dict = PbxDict()
+ objects_dict.add_item(self.build_all_tdep_id, all_dict, 'ALL_BUILD')
+ all_dict.add_item('isa', 'PBXTargetDependency')
+ all_dict.add_item('target', self.all_id)
+ targets = []
+ targets.append((self.regen_dependency_id, self.regen_id, 'REGEN', None))
+ for t in self.build_targets:
+ idval = self.pbx_dep_map[t] # VERIFY: is this correct?
+ targets.append((idval, self.native_targets[t], t, self.containerproxy_map[t]))
+
+ for t in self.custom_targets:
+ idval = self.pbx_custom_dep_map[t]
+            targets.append((idval, self.custom_aggregate_targets[t], t, None))  # could also pass self.containerproxy_map[t] here
+
+        # Sort objects by ID
+ sorted_targets = sorted(targets, key=operator.itemgetter(0))
+ for t in sorted_targets:
+ t_dict = PbxDict()
+ objects_dict.add_item(t[0], t_dict, 'PBXTargetDependency')
+ t_dict.add_item('isa', 'PBXTargetDependency')
+ t_dict.add_item('target', t[1], t[2])
+ if t[3] is not None:
+ t_dict.add_item('targetProxy', t[3], 'PBXContainerItemProxy')
+
+ def generate_xc_build_configuration(self, objects_dict):
+ # First the setup for the toplevel project.
+ for buildtype in self.buildtypes:
+ bt_dict = PbxDict()
+ objects_dict.add_item(self.project_configurations[buildtype], bt_dict, buildtype)
+ bt_dict.add_item('isa', 'XCBuildConfiguration')
+ settings_dict = PbxDict()
+ bt_dict.add_item('buildSettings', settings_dict)
+ settings_dict.add_item('ARCHS', '"$(NATIVE_ARCH_ACTUAL)"')
+ settings_dict.add_item('ONLY_ACTIVE_ARCH', 'YES')
+ settings_dict.add_item('SWIFT_VERSION', '5.0')
+ settings_dict.add_item('SDKROOT', '"macosx"')
+ settings_dict.add_item('SYMROOT', '"%s/build"' % self.environment.get_build_dir())
+ bt_dict.add_item('name', f'"{buildtype}"')
+
+ # Then the all target.
+ for buildtype in self.buildtypes:
+ bt_dict = PbxDict()
+ objects_dict.add_item(self.buildall_configurations[buildtype], bt_dict, buildtype)
+ bt_dict.add_item('isa', 'XCBuildConfiguration')
+ settings_dict = PbxDict()
+ bt_dict.add_item('buildSettings', settings_dict)
+ settings_dict.add_item('SYMROOT', '"%s"' % self.environment.get_build_dir())
+ warn_array = PbxArray()
+ warn_array.add_item('"$(inherited)"')
+ settings_dict.add_item('WARNING_CFLAGS', warn_array)
+
+ bt_dict.add_item('name', f'"{buildtype}"')
+
+ # Then the test target.
+ for buildtype in self.buildtypes:
+ bt_dict = PbxDict()
+ objects_dict.add_item(self.test_configurations[buildtype], bt_dict, buildtype)
+ bt_dict.add_item('isa', 'XCBuildConfiguration')
+ settings_dict = PbxDict()
+ bt_dict.add_item('buildSettings', settings_dict)
+ settings_dict.add_item('SYMROOT', '"%s"' % self.environment.get_build_dir())
+ warn_array = PbxArray()
+ settings_dict.add_item('WARNING_CFLAGS', warn_array)
+ warn_array.add_item('"$(inherited)"')
+ bt_dict.add_item('name', f'"{buildtype}"')
+
+ # Now finally targets.
+ for target_name, target in self.build_targets.items():
+ self.generate_single_build_target(objects_dict, target_name, target)
+
+        for target_name, target in self.custom_targets.items():
+            # Iterate over the buildtypes explicitly instead of relying on the
+            # loop variable leaking out of the loops above.
+            for buildtype in self.buildtypes:
+                bt_dict = PbxDict()
+                objects_dict.add_item(self.buildconfmap[target_name][buildtype], bt_dict, buildtype)
+                bt_dict.add_item('isa', 'XCBuildConfiguration')
+                settings_dict = PbxDict()
+                bt_dict.add_item('buildSettings', settings_dict)
+                settings_dict.add_item('ARCHS', '"$(NATIVE_ARCH_ACTUAL)"')
+                settings_dict.add_item('ONLY_ACTIVE_ARCH', 'YES')
+                settings_dict.add_item('SDKROOT', '"macosx"')
+                settings_dict.add_item('SYMROOT', '"%s/build"' % self.environment.get_build_dir())
+                bt_dict.add_item('name', f'"{buildtype}"')
+
+
+ def determine_internal_dep_link_args(self, target, buildtype):
+ links_dylib = False
+ dep_libs = []
+ for l in target.link_targets:
+ if isinstance(target, build.SharedModule) and isinstance(l, build.Executable):
+ continue
+ if isinstance(l, build.CustomTargetIndex):
+ rel_dir = self.get_custom_target_output_dir(l.target)
+ libname = l.get_filename()
+ elif isinstance(l, build.CustomTarget):
+ rel_dir = self.get_custom_target_output_dir(l)
+ libname = l.get_filename()
+ else:
+ rel_dir = self.get_target_dir(l)
+ libname = l.get_filename()
+ abs_path = os.path.join(self.environment.get_build_dir(), rel_dir, libname)
+ dep_libs.append("'%s'" % abs_path)
+ if isinstance(l, build.SharedLibrary):
+ links_dylib = True
+ if isinstance(l, build.StaticLibrary):
+ (sub_libs, sub_links_dylib) = self.determine_internal_dep_link_args(l, buildtype)
+ dep_libs += sub_libs
+ links_dylib = links_dylib or sub_links_dylib
+ return (dep_libs, links_dylib)
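+        # Illustrative result with hypothetical targets: for a target linking a
+        # static libfoo.a that itself links a shared libbar.dylib, this returns
+        # (["'/build/sub/libfoo.a'", "'/build/sub/libbar.dylib'"], True), since
+        # the recursion also pulls in the static library's own link targets.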
+
+ def generate_single_build_target(self, objects_dict, target_name, target):
+ for buildtype in self.buildtypes:
+ dep_libs = []
+ links_dylib = False
+ headerdirs = []
+ for d in target.include_dirs:
+ for sd in d.incdirs:
+ cd = os.path.join(d.curdir, sd)
+ headerdirs.append(os.path.join(self.environment.get_source_dir(), cd))
+ headerdirs.append(os.path.join(self.environment.get_build_dir(), cd))
+ for extra in d.extra_build_dirs:
+ headerdirs.append(os.path.join(self.environment.get_build_dir(), extra))
+ (dep_libs, links_dylib) = self.determine_internal_dep_link_args(target, buildtype)
+ if links_dylib:
+ dep_libs = ['-Wl,-search_paths_first', '-Wl,-headerpad_max_install_names'] + dep_libs
+ dylib_version = None
+ if isinstance(target, build.SharedLibrary):
+ if isinstance(target, build.SharedModule):
+ ldargs = []
+ else:
+ ldargs = ['-dynamiclib']
+ ldargs += ['-Wl,-headerpad_max_install_names'] + dep_libs
+ install_path = os.path.join(self.environment.get_build_dir(), target.subdir, buildtype)
+ dylib_version = target.soversion
+ else:
+ ldargs = dep_libs
+ install_path = ''
+ if dylib_version is not None:
+ product_name = target.get_basename() + '.' + dylib_version
+ else:
+ product_name = target.get_basename()
+ ldargs += target.link_args
+            # Swift is special. Again. You can't mix Swift with other languages
+            # in the same target. Thus for Swift we only use the Swift compiler
+            # itself as the linker, with no extra stdlib args.
+ if self.is_swift_target(target):
+ linker, stdlib_args = target.compilers['swift'], []
+ else:
+ linker, stdlib_args = self.determine_linker_and_stdlib_args(target)
+ if not isinstance(target, build.StaticLibrary):
+ ldargs += self.build.get_project_link_args(linker, target.subproject, target.for_machine)
+ ldargs += self.build.get_global_link_args(linker, target.for_machine)
+ cargs = []
+ for dep in target.get_external_deps():
+ cargs += dep.get_compile_args()
+ ldargs += dep.get_link_args()
+ for o in target.objects:
+ # Add extracted objects to the link line by hand.
+ if isinstance(o, build.ExtractedObjects):
+ added_objs = set()
+ for objname_rel in o.get_outputs(self):
+ objname_abs = os.path.join(self.environment.get_build_dir(), o.target.subdir, objname_rel)
+ if objname_abs not in added_objs:
+ added_objs.add(objname_abs)
+ ldargs += [r'\"' + objname_abs + r'\"']
+ generator_id = 0
+ for o in target.generated:
+ if isinstance(o, build.GeneratedList):
+ outputs = self.generator_outputs[target_name, generator_id]
+ generator_id += 1
+ for o_abs in outputs:
+ if o_abs.endswith('.o') or o_abs.endswith('.obj'):
+ ldargs += [r'\"' + o_abs + r'\"']
+ else:
+ if isinstance(o, build.CustomTarget):
+ (srcs, ofilenames, cmd) = self.eval_custom_target_command(o)
+ for ofname in ofilenames:
+ if os.path.splitext(ofname)[-1] in LINKABLE_EXTENSIONS:
+ ldargs += [r'\"' + os.path.join(self.environment.get_build_dir(), ofname) + r'\"']
+ elif isinstance(o, build.CustomTargetIndex):
+ for ofname in o.get_outputs():
+ if os.path.splitext(ofname)[-1] in LINKABLE_EXTENSIONS:
+ ldargs += [r'\"' + os.path.join(self.environment.get_build_dir(), ofname) + r'\"']
+ else:
+ raise RuntimeError(o)
+ if isinstance(target, build.SharedModule):
+ options = self.environment.coredata.options
+ ldargs += linker.get_std_shared_module_link_args(options)
+ elif isinstance(target, build.SharedLibrary):
+ ldargs += linker.get_std_shared_lib_link_args()
+ ldstr = ' '.join(ldargs)
+ valid = self.buildconfmap[target_name][buildtype]
+ langargs = {}
+ for lang in self.environment.coredata.compilers[target.for_machine]:
+ if lang not in LANGNAMEMAP:
+ continue
+ compiler = target.compilers.get(lang)
+ if compiler is None:
+ continue
+ # Start with warning args
+ warn_args = compiler.get_warn_args(self.get_option_for_target(OptionKey('warning_level'), target))
+ copt_proxy = self.get_compiler_options_for_target(target)
+ std_args = compiler.get_option_compile_args(copt_proxy)
+ # Add compile args added using add_project_arguments()
+ pargs = self.build.projects_args[target.for_machine].get(target.subproject, {}).get(lang, [])
+ # Add compile args added using add_global_arguments()
+ # These override per-project arguments
+ gargs = self.build.global_args[target.for_machine].get(lang, [])
+ targs = target.get_extra_args(lang)
+ args = warn_args + std_args + pargs + gargs + targs
+ if lang == 'swift':
+ # For some reason putting Swift module dirs in HEADER_SEARCH_PATHS does not work,
+ # but adding -I/path to manual args does work.
+ swift_dep_dirs = self.determine_swift_dep_dirs(target)
+ for d in swift_dep_dirs:
+ args += compiler.get_include_args(d, False)
+ if args:
+ lang_cargs = cargs
+ if compiler and target.implicit_include_directories:
+                    # It is unclear what the cwd is when Xcode runs. -I. does not
+                    # seem to add the root build dir to the search path, so add an
+                    # absolute path instead. This may break reproducible builds;
+                    # patches are welcome.
+ lang_cargs += self.get_custom_target_dir_include_args(target, compiler, absolute_path=True)
+                # Xcode cannot handle separate compilation flags for C and
+                # Objective-C; both go into OTHER_CFLAGS. The same applies to
+                # C++ and Objective-C++.
+ if lang == 'objc':
+ lang = 'c'
+ elif lang == 'objcpp':
+ lang = 'cpp'
+ langname = LANGNAMEMAP[lang]
+ if langname in langargs:
+ langargs[langname] += args
+ else:
+ langargs[langname] = args
+ langargs[langname] += lang_cargs
+ symroot = os.path.join(self.environment.get_build_dir(), target.subdir)
+ bt_dict = PbxDict()
+ objects_dict.add_item(valid, bt_dict, buildtype)
+ bt_dict.add_item('isa', 'XCBuildConfiguration')
+ settings_dict = PbxDict()
+ bt_dict.add_item('buildSettings', settings_dict)
+ settings_dict.add_item('COMBINE_HIDPI_IMAGES', 'YES')
+ if isinstance(target, build.SharedModule):
+ settings_dict.add_item('DYLIB_CURRENT_VERSION', '""')
+ settings_dict.add_item('DYLIB_COMPATIBILITY_VERSION', '""')
+ else:
+ if dylib_version is not None:
+ settings_dict.add_item('DYLIB_CURRENT_VERSION', f'"{dylib_version}"')
+ if target.prefix:
+ settings_dict.add_item('EXECUTABLE_PREFIX', target.prefix)
+ if target.suffix:
+ suffix = '.' + target.suffix
+ settings_dict.add_item('EXECUTABLE_SUFFIX', suffix)
+ settings_dict.add_item('GCC_GENERATE_DEBUGGING_SYMBOLS', BOOL2XCODEBOOL[self.get_option_for_target(OptionKey('debug'), target)])
+ settings_dict.add_item('GCC_INLINES_ARE_PRIVATE_EXTERN', 'NO')
+ settings_dict.add_item('GCC_OPTIMIZATION_LEVEL', OPT2XCODEOPT[self.get_option_for_target(OptionKey('optimization'), target)])
+ if target.has_pch:
+ # Xcode uses GCC_PREFIX_HEADER which only allows one file per target/executable. Precompiling various header files and
+ # applying a particular pch to each source file will require custom scripts (as a build phase) and build flags per each
+ # file. Since Xcode itself already discourages precompiled headers in favor of modules we don't try much harder here.
+ pchs = target.get_pch('c') + target.get_pch('cpp') + target.get_pch('objc') + target.get_pch('objcpp')
+                # Make sure to use only headers (other backends require implementation
+                # files like *.c, *.cpp, etc.; those should not be used here).
+                pchs = [pch for pch in pchs if pch.endswith('.h') or pch.endswith('.hh') or pch.endswith('.hpp')]
+ if pchs:
+ if len(pchs) > 1:
+ mlog.warning(f'Unsupported Xcode configuration: More than 1 precompiled header found "{pchs!s}". Target "{target.name}" might not compile correctly.')
+ relative_pch_path = os.path.join(target.get_subdir(), pchs[0]) # Path relative to target so it can be used with "$(PROJECT_DIR)"
+ settings_dict.add_item('GCC_PRECOMPILE_PREFIX_HEADER', 'YES')
+ settings_dict.add_item('GCC_PREFIX_HEADER', f'"$(PROJECT_DIR)/{relative_pch_path}"')
+ settings_dict.add_item('GCC_PREPROCESSOR_DEFINITIONS', '""')
+ settings_dict.add_item('GCC_SYMBOLS_PRIVATE_EXTERN', 'NO')
+ header_arr = PbxArray()
+ unquoted_headers = []
+ unquoted_headers.append(self.get_target_private_dir_abs(target))
+ if target.implicit_include_directories:
+ unquoted_headers.append(os.path.join(self.environment.get_build_dir(), target.get_subdir()))
+ unquoted_headers.append(os.path.join(self.environment.get_source_dir(), target.get_subdir()))
+ if headerdirs:
+ for i in headerdirs:
+ i = os.path.normpath(i)
+ unquoted_headers.append(i)
+ for i in unquoted_headers:
+ header_arr.add_item(f'"\\"{i}\\""')
+ settings_dict.add_item('HEADER_SEARCH_PATHS', header_arr)
+ settings_dict.add_item('INSTALL_PATH', f'"{install_path}"')
+ settings_dict.add_item('LIBRARY_SEARCH_PATHS', '""')
+ if isinstance(target, build.SharedModule):
+ settings_dict.add_item('LIBRARY_STYLE', 'BUNDLE')
+ settings_dict.add_item('MACH_O_TYPE', 'mh_bundle')
+ elif isinstance(target, build.SharedLibrary):
+ settings_dict.add_item('LIBRARY_STYLE', 'DYNAMIC')
+ self.add_otherargs(settings_dict, langargs)
+ settings_dict.add_item('OTHER_LDFLAGS', f'"{ldstr}"')
+ settings_dict.add_item('OTHER_REZFLAGS', '""')
+ if ' ' in product_name:
+ settings_dict.add_item('PRODUCT_NAME', f'"{product_name}"')
+ else:
+ settings_dict.add_item('PRODUCT_NAME', product_name)
+ settings_dict.add_item('SECTORDER_FLAGS', '""')
+ settings_dict.add_item('SYMROOT', f'"{symroot}"')
+ sysheader_arr = PbxArray()
+            # Xcode will change every -I flag that points inside these
+            # directories to an -isystem. Thus set nothing here, since we
+            # control our own include flags.
+ settings_dict.add_item('SYSTEM_HEADER_SEARCH_PATHS', sysheader_arr)
+ settings_dict.add_item('USE_HEADERMAP', 'NO')
+ warn_array = PbxArray()
+ settings_dict.add_item('WARNING_CFLAGS', warn_array)
+ warn_array.add_item('"$(inherited)"')
+ bt_dict.add_item('name', buildtype)
+
+ def add_otherargs(self, settings_dict, langargs):
+ for langname, args in langargs.items():
+ if args:
+ quoted_args = []
+ for a in args:
+                    # This works, but
+                    # a) it's ugly as sin
+                    # b) it is not obvious why it works or why every backslash
+                    #    must be escaped into eight backslashes
+                    a = a.replace(chr(92), 8*chr(92)) # chr(92) is a backslash; this is how we smuggle it in without Python's quoting grabbing it.
+ a = a.replace(r'"', r'\\\"')
+ if ' ' in a or "'" in a:
+ a = r'\"' + a + r'\"'
+ quoted_args.append(a)
+ settings_dict.add_item(f'OTHER_{langname}FLAGS', '"' + ' '.join(quoted_args) + '"')
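+    # Worked example of the escaping above, for a hypothetical flag: the single
+    # argument -DFOO="bar baz" contains no backslashes, so only the quote and
+    # space rules apply and it is emitted into the OTHER_..._FLAGS value as
+    #   \"-DFOO=\\\"bar baz\\\"\"
+    # which should unescape back to -DFOO="bar baz" by the time the compiler sees it.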
+
+ def generate_xc_configurationList(self, objects_dict):
+ # FIXME: sort items
+ conf_dict = PbxDict()
+ objects_dict.add_item(self.project_conflist, conf_dict, f'Build configuration list for PBXProject "{self.build.project_name}"')
+ conf_dict.add_item('isa', 'XCConfigurationList')
+ confs_arr = PbxArray()
+ conf_dict.add_item('buildConfigurations', confs_arr)
+ for buildtype in self.buildtypes:
+ confs_arr.add_item(self.project_configurations[buildtype], buildtype)
+ conf_dict.add_item('defaultConfigurationIsVisible', 0)
+ conf_dict.add_item('defaultConfigurationName', self.buildtype)
+
+ # Now the all target
+ all_dict = PbxDict()
+ objects_dict.add_item(self.all_buildconf_id, all_dict, 'Build configuration list for PBXAggregateTarget "ALL_BUILD"')
+ all_dict.add_item('isa', 'XCConfigurationList')
+ conf_arr = PbxArray()
+ all_dict.add_item('buildConfigurations', conf_arr)
+ for buildtype in self.buildtypes:
+ conf_arr.add_item(self.buildall_configurations[buildtype], buildtype)
+ all_dict.add_item('defaultConfigurationIsVisible', 0)
+ all_dict.add_item('defaultConfigurationName', self.buildtype)
+
+ # Test target
+ test_dict = PbxDict()
+ objects_dict.add_item(self.test_buildconf_id, test_dict, 'Build configuration list for PBXAggregateTarget "RUN_TEST"')
+ test_dict.add_item('isa', 'XCConfigurationList')
+ conf_arr = PbxArray()
+ test_dict.add_item('buildConfigurations', conf_arr)
+ for buildtype in self.buildtypes:
+ conf_arr.add_item(self.test_configurations[buildtype], buildtype)
+ test_dict.add_item('defaultConfigurationIsVisible', 0)
+ test_dict.add_item('defaultConfigurationName', self.buildtype)
+
+ # Regen target
+ regen_dict = PbxDict()
+        objects_dict.add_item(self.regen_buildconf_id, regen_dict, 'Build configuration list for PBXAggregateTarget "REGENERATE"')
+ regen_dict.add_item('isa', 'XCConfigurationList')
+ conf_arr = PbxArray()
+ regen_dict.add_item('buildConfigurations', conf_arr)
+ for buildtype in self.buildtypes:
+ conf_arr.add_item(self.test_configurations[buildtype], buildtype)
+ regen_dict.add_item('defaultConfigurationIsVisible', 0)
+ regen_dict.add_item('defaultConfigurationName', self.buildtype)
+
+ for target_name in self.build_targets:
+ t_dict = PbxDict()
+ listid = self.buildconflistmap[target_name]
+ objects_dict.add_item(listid, t_dict, f'Build configuration list for PBXNativeTarget "{target_name}"')
+ t_dict.add_item('isa', 'XCConfigurationList')
+ conf_arr = PbxArray()
+ t_dict.add_item('buildConfigurations', conf_arr)
+ idval = self.buildconfmap[target_name][self.buildtype]
+ conf_arr.add_item(idval, self.buildtype)
+ t_dict.add_item('defaultConfigurationIsVisible', 0)
+ t_dict.add_item('defaultConfigurationName', self.buildtype)
+
+ for target_name in self.custom_targets:
+ t_dict = PbxDict()
+ listid = self.buildconflistmap[target_name]
+ objects_dict.add_item(listid, t_dict, f'Build configuration list for PBXAggregateTarget "{target_name}"')
+ t_dict.add_item('isa', 'XCConfigurationList')
+ conf_arr = PbxArray()
+ t_dict.add_item('buildConfigurations', conf_arr)
+ idval = self.buildconfmap[target_name][self.buildtype]
+ conf_arr.add_item(idval, self.buildtype)
+ t_dict.add_item('defaultConfigurationIsVisible', 0)
+ t_dict.add_item('defaultConfigurationName', self.buildtype)
+
+ def generate_prefix(self, pbxdict):
+ pbxdict.add_item('archiveVersion', '1')
+ pbxdict.add_item('classes', PbxDict())
+ pbxdict.add_item('objectVersion', '46')
+ objects_dict = PbxDict()
+ pbxdict.add_item('objects', objects_dict)
+
+ return objects_dict
+
+ def generate_suffix(self, pbxdict):
+ pbxdict.add_item('rootObject', self.project_uid, 'Project object')
diff --git a/meson/mesonbuild/build.py b/meson/mesonbuild/build.py
new file mode 100644
index 000000000..846de8d3b
--- /dev/null
+++ b/meson/mesonbuild/build.py
@@ -0,0 +1,2686 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict
+from functools import lru_cache
+import copy
+import hashlib
+import itertools, pathlib
+import os
+import pickle
+import re
+import textwrap
+import typing as T
+
+from . import environment
+from . import dependencies
+from . import mlog
+from . import programs
+from .mesonlib import (
+ HoldableObject, SecondLevelHolder,
+ File, MesonException, MachineChoice, PerMachine, OrderedSet, listify,
+ extract_as_list, typeslistify, stringlistify, classify_unity_sources,
+ get_filenames_templates_dict, substitute_values, has_path_sep,
+ OptionKey, PerMachineDefaultable,
+ MesonBugException, FileOrString,
+)
+from .compilers import (
+ Compiler, is_object, clink_langs, sort_clink, lang_suffixes,
+ is_known_suffix, detect_static_linker
+)
+from .linkers import StaticLinker
+from .interpreterbase import FeatureNew
+
+if T.TYPE_CHECKING:
+ from ._typing import ImmutableListProtocol, ImmutableSetProtocol
+ from .interpreter.interpreter import Test, SourceOutputs, Interpreter
+ from .mesonlib import FileMode, FileOrString
+ from .modules import ModuleState
+ from .backend.backends import Backend
+
+pch_kwargs = {'c_pch', 'cpp_pch'}
+
+lang_arg_kwargs = {
+ 'c_args',
+ 'cpp_args',
+ 'cuda_args',
+ 'd_args',
+ 'd_import_dirs',
+ 'd_unittest',
+ 'd_module_versions',
+ 'd_debug',
+ 'fortran_args',
+ 'java_args',
+ 'objc_args',
+ 'objcpp_args',
+ 'rust_args',
+ 'vala_args',
+ 'cs_args',
+ 'cython_args',
+}
+
+vala_kwargs = {'vala_header', 'vala_gir', 'vala_vapi'}
+rust_kwargs = {'rust_crate_type'}
+cs_kwargs = {'resources', 'cs_args'}
+
+buildtarget_kwargs = {
+ 'build_by_default',
+ 'build_rpath',
+ 'dependencies',
+ 'extra_files',
+ 'gui_app',
+ 'link_with',
+ 'link_whole',
+ 'link_args',
+ 'link_depends',
+ 'implicit_include_directories',
+ 'include_directories',
+ 'install',
+ 'install_rpath',
+ 'install_dir',
+ 'install_mode',
+ 'name_prefix',
+ 'name_suffix',
+ 'native',
+ 'objects',
+ 'override_options',
+ 'sources',
+ 'gnu_symbol_visibility',
+ 'link_language',
+ 'win_subsystem',
+}
+
+known_build_target_kwargs = (
+ buildtarget_kwargs |
+ lang_arg_kwargs |
+ pch_kwargs |
+ vala_kwargs |
+ rust_kwargs |
+ cs_kwargs)
+
+known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie'}
+known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions'}
+known_shmod_kwargs = known_build_target_kwargs | {'vs_module_defs'}
+known_stlib_kwargs = known_build_target_kwargs | {'pic', 'prelink'}
+known_jar_kwargs = known_exe_kwargs | {'main_class'}
+
+@lru_cache(maxsize=None)
+def get_target_macos_dylib_install_name(ld) -> str:
+ name = ['@rpath/', ld.prefix, ld.name]
+ if ld.soversion is not None:
+ name.append('.' + ld.soversion)
+ name.append('.dylib')
+ return ''.join(name)
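+# For example, a shared library with prefix 'lib', name 'foo' and soversion '2'
+# maps to '@rpath/libfoo.2.dylib'; with no soversion, '@rpath/libfoo.dylib'.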
+
+class InvalidArguments(MesonException):
+ pass
+
+class DependencyOverride(HoldableObject):
+ def __init__(self, dep, node, explicit=True):
+ self.dep = dep
+ self.node = node
+ self.explicit = explicit
+
+class Headers(HoldableObject):
+
+ def __init__(self, sources: T.List[File], install_subdir: T.Optional[str],
+ install_dir: T.Optional[str], install_mode: 'FileMode',
+ subproject: str):
+ self.sources = sources
+ self.install_subdir = install_subdir
+ self.custom_install_dir = install_dir
+ self.custom_install_mode = install_mode
+ self.subproject = subproject
+
+ # TODO: we really don't need any of these methods, but they're preserved to
+ # keep APIs relying on them working.
+
+ def set_install_subdir(self, subdir: str) -> None:
+ self.install_subdir = subdir
+
+ def get_install_subdir(self) -> T.Optional[str]:
+ return self.install_subdir
+
+ def get_sources(self) -> T.List[File]:
+ return self.sources
+
+ def get_custom_install_dir(self) -> T.Optional[str]:
+ return self.custom_install_dir
+
+ def get_custom_install_mode(self) -> 'FileMode':
+ return self.custom_install_mode
+
+
+class Man(HoldableObject):
+
+ def __init__(self, sources: T.List[File], install_dir: T.Optional[str],
+ install_mode: 'FileMode', subproject: str,
+ locale: T.Optional[str]):
+ self.sources = sources
+ self.custom_install_dir = install_dir
+ self.custom_install_mode = install_mode
+ self.subproject = subproject
+ self.locale = locale
+
+ def get_custom_install_dir(self) -> T.Optional[str]:
+ return self.custom_install_dir
+
+ def get_custom_install_mode(self) -> 'FileMode':
+ return self.custom_install_mode
+
+ def get_sources(self) -> T.List['File']:
+ return self.sources
+
+
+class InstallDir(HoldableObject):
+
+ def __init__(self, src_subdir: str, inst_subdir: str, install_dir: str,
+ install_mode: 'FileMode',
+ exclude: T.Tuple[T.Set[str], T.Set[str]],
+ strip_directory: bool, subproject: str,
+ from_source_dir: bool = True):
+ self.source_subdir = src_subdir
+ self.installable_subdir = inst_subdir
+ self.install_dir = install_dir
+ self.install_mode = install_mode
+ self.exclude = exclude
+ self.strip_directory = strip_directory
+ self.from_source_dir = from_source_dir
+ self.subproject = subproject
+
+
+class Build:
+ """A class that holds the status of one build including
+ all dependencies and so on.
+ """
+
+ def __init__(self, environment: environment.Environment):
+ self.project_name = 'name of master project'
+ self.project_version = None
+ self.environment = environment
+ self.projects = {}
+ self.targets: T.MutableMapping[str, 'Target'] = OrderedDict()
+ self.run_target_names: T.Set[T.Tuple[str, str]] = set()
+ self.global_args: PerMachine[T.Dict[str, T.List[str]]] = PerMachine({}, {})
+ self.global_link_args: PerMachine[T.Dict[str, T.List[str]]] = PerMachine({}, {})
+ self.projects_args: PerMachine[T.Dict[str, T.Dict[str, T.List[str]]]] = PerMachine({}, {})
+ self.projects_link_args: PerMachine[T.Dict[str, T.Dict[str, T.List[str]]]] = PerMachine({}, {})
+ self.tests: T.List['Test'] = []
+ self.benchmarks: T.List['Test'] = []
+ self.headers: T.List[Headers] = []
+ self.man: T.List[Man] = []
+ self.data: T.List[Data] = []
+ self.static_linker: PerMachine[StaticLinker] = PerMachine(None, None)
+ self.subprojects = {}
+ self.subproject_dir = ''
+ self.install_scripts = []
+ self.postconf_scripts = []
+ self.dist_scripts = []
+ self.install_dirs: T.List[InstallDir] = []
+ self.dep_manifest_name = None
+ self.dep_manifest = {}
+ self.stdlibs = PerMachine({}, {})
+ self.test_setups: T.Dict[str, TestSetup] = {}
+ self.test_setup_default_name = None
+ self.find_overrides = {}
+ self.searched_programs = set() # The list of all programs that have been searched for.
+
+        # If we are doing a cross build we need two caches; if we're doing a
+        # build == host compilation, both caches should point to the same place.
+ self.dependency_overrides: PerMachine[T.Dict[T.Tuple, DependencyOverride]] = PerMachineDefaultable.default(
+ environment.is_cross_build(), {}, {})
+ self.devenv: T.List[EnvironmentVariables] = []
+
+ def get_build_targets(self):
+ build_targets = OrderedDict()
+ for name, t in self.targets.items():
+ if isinstance(t, BuildTarget):
+ build_targets[name] = t
+ return build_targets
+
+ def get_custom_targets(self):
+ custom_targets = OrderedDict()
+ for name, t in self.targets.items():
+ if isinstance(t, CustomTarget):
+ custom_targets[name] = t
+ return custom_targets
+
+ def copy(self):
+ other = Build(self.environment)
+ for k, v in self.__dict__.items():
+ if isinstance(v, (list, dict, set, OrderedDict)):
+ other.__dict__[k] = v.copy()
+ else:
+ other.__dict__[k] = v
+ return other
+
+ def merge(self, other):
+ for k, v in other.__dict__.items():
+ self.__dict__[k] = v
+
+ def ensure_static_linker(self, compiler):
+ if self.static_linker[compiler.for_machine] is None and compiler.needs_static_linker():
+ self.static_linker[compiler.for_machine] = detect_static_linker(self.environment, compiler)
+
+ def get_project(self):
+ return self.projects['']
+
+ def get_subproject_dir(self):
+ return self.subproject_dir
+
+ def get_targets(self) -> T.Dict[str, 'Target']:
+ return self.targets
+
+ def get_tests(self) -> T.List['Test']:
+ return self.tests
+
+ def get_benchmarks(self) -> T.List['Test']:
+ return self.benchmarks
+
+ def get_headers(self):
+ return self.headers
+
+ def get_man(self):
+ return self.man
+
+ def get_data(self):
+ return self.data
+
+ def get_install_subdirs(self):
+ return self.install_dirs
+
+ def get_global_args(self, compiler: 'Compiler', for_machine: 'MachineChoice') -> T.List[str]:
+ d = self.global_args[for_machine]
+ return d.get(compiler.get_language(), [])
+
+ def get_project_args(self, compiler: 'Compiler', project: str, for_machine: 'MachineChoice') -> T.List[str]:
+ d = self.projects_args[for_machine]
+ args = d.get(project)
+ if not args:
+ return []
+ return args.get(compiler.get_language(), [])
+
+ def get_global_link_args(self, compiler: 'Compiler', for_machine: 'MachineChoice') -> T.List[str]:
+ d = self.global_link_args[for_machine]
+ return d.get(compiler.get_language(), [])
+
+ def get_project_link_args(self, compiler: 'Compiler', project: str, for_machine: 'MachineChoice') -> T.List[str]:
+ d = self.projects_link_args[for_machine]
+
+ link_args = d.get(project)
+ if not link_args:
+ return []
+
+ return link_args.get(compiler.get_language(), [])
+
+class IncludeDirs(HoldableObject):
+
+ """Internal representation of an include_directories call."""
+
+ def __init__(self, curdir: str, dirs: T.List[str], is_system: bool, extra_build_dirs: T.Optional[T.List[str]] = None):
+ self.curdir = curdir
+ self.incdirs = dirs
+ self.is_system = is_system
+
+ # Interpreter has validated that all given directories
+ # actually exist.
+ self.extra_build_dirs: T.List[str] = extra_build_dirs or []
+
+ def __repr__(self) -> str:
+ r = '<{} {}/{}>'
+ return r.format(self.__class__.__name__, self.curdir, self.incdirs)
+
+ def get_curdir(self) -> str:
+ return self.curdir
+
+ def get_incdirs(self) -> T.List[str]:
+ return self.incdirs
+
+ def get_extra_build_dirs(self) -> T.List[str]:
+ return self.extra_build_dirs
+
+ def to_string_list(self, sourcedir: str) -> T.List[str]:
+ """Convert IncludeDirs object to a list of strings."""
+ strlist: T.List[str] = []
+ for idir in self.incdirs:
+ strlist.append(os.path.join(sourcedir, self.curdir, idir))
+ return strlist
+
+class ExtractedObjects(HoldableObject):
+ '''
+ Holds a list of sources for which the objects must be extracted
+ '''
+ def __init__(self, target, srclist=None, genlist=None, objlist=None, recursive=True):
+ self.target = target
+ self.recursive = recursive
+ self.srclist = srclist if srclist is not None else []
+ self.genlist = genlist if genlist is not None else []
+ self.objlist = objlist if objlist is not None else []
+ if self.target.is_unity:
+ self.check_unity_compatible()
+
+ def __repr__(self):
+ r = '<{0} {1!r}: {2}>'
+ return r.format(self.__class__.__name__, self.target.name, self.srclist)
+
+ @staticmethod
+ def get_sources(sources, generated_sources):
+ # Merge sources and generated sources
+ sources = list(sources)
+ for gensrc in generated_sources:
+ for s in gensrc.get_outputs():
+ # We cannot know the path where this source will be generated,
+ # but all we need here is the file extension to determine the
+ # compiler.
+ sources.append(s)
+
+ # Filter out headers and all non-source files
+ return [s for s in sources if environment.is_source(s) and not environment.is_header(s)]
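+    # E.g. combining source 'main.c' with a generator that outputs 'gen.c' and
+    # 'gen.h' yields ['main.c', 'gen.c']; headers and non-source files are dropped.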
+
+ def classify_all_sources(self, sources, generated_sources):
+ sources = self.get_sources(sources, generated_sources)
+ return classify_unity_sources(self.target.compilers.values(), sources)
+
+ def check_unity_compatible(self):
+        # Figure out if the extracted object list is compatible with a Unity
+        # build. When we're doing a Unity build, we go through the sources and
+        # create a single source file from each subset of the sources that can
+        # be compiled with a specific compiler. Then we create one object from
+        # each unified source file. So for each compiler we can either extract
+        # all of its sources or none.
+ cmpsrcs = self.classify_all_sources(self.target.sources, self.target.generated)
+ extracted_cmpsrcs = self.classify_all_sources(self.srclist, self.genlist)
+
+ for comp, srcs in extracted_cmpsrcs.items():
+ if set(srcs) != set(cmpsrcs[comp]):
+ raise MesonException('Single object files can not be extracted '
+ 'in Unity builds. You can only extract all '
+ 'the object files for each compiler at once.')
+
+ def get_outputs(self, backend):
+ return [
+ backend.object_filename_from_source(self.target, source)
+ for source in self.get_sources(self.srclist, self.genlist)
+ ]
+
+class EnvironmentVariables(HoldableObject):
+ def __init__(self) -> None:
+ self.envvars = []
+ # The set of all env vars we have operations for. Only used for self.has_name()
+ self.varnames = set()
+
+ def __repr__(self):
+ repr_str = "<{0}: {1}>"
+ return repr_str.format(self.__class__.__name__, self.envvars)
+
+ def has_name(self, name: str) -> bool:
+ return name in self.varnames
+
+ def set(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None:
+ self.varnames.add(name)
+ self.envvars.append((self._set, name, values, separator))
+
+ def append(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None:
+ self.varnames.add(name)
+ self.envvars.append((self._append, name, values, separator))
+
+ def prepend(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None:
+ self.varnames.add(name)
+ self.envvars.append((self._prepend, name, values, separator))
+
+ def _set(self, env: T.Dict[str, str], name: str, values: T.List[str], separator: str) -> str:
+ return separator.join(values)
+
+ def _append(self, env: T.Dict[str, str], name: str, values: T.List[str], separator: str) -> str:
+ curr = env.get(name)
+ return separator.join(values if curr is None else [curr] + values)
+
+ def _prepend(self, env: T.Dict[str, str], name: str, values: T.List[str], separator: str) -> str:
+ curr = env.get(name)
+ return separator.join(values if curr is None else values + [curr])
+
+ def get_env(self, full_env: T.Dict[str, str]) -> T.Dict[str, str]:
+ env = full_env.copy()
+ for method, name, values, separator in self.envvars:
+ env[name] = method(env, name, values, separator)
+ return env
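+    # A sketch of the accumulated-operation semantics (POSIX, os.pathsep == ':'):
+    #   env = EnvironmentVariables()
+    #   env.set('PATH', ['/opt/bin'])
+    #   env.append('PATH', ['/extra/bin'])
+    #   env.get_env({'PATH': '/usr/bin'})  # -> {'PATH': '/opt/bin:/extra/bin'}
+    # set() discards the inherited value; append()/prepend() splice around it.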
+
+class Target(HoldableObject):
+
+ # TODO: should Target be an abc.ABCMeta?
+
+ def __init__(self, name: str, subdir: str, subproject: str, build_by_default: bool, for_machine: MachineChoice):
+ if has_path_sep(name):
+ # Fix failing test 53 when this becomes an error.
+ mlog.warning(textwrap.dedent(f'''\
+ Target "{name}" has a path separator in its name.
+ This is not supported, it can cause unexpected failures and will become
+ a hard error in the future.\
+ '''))
+ self.name = name
+ self.subdir = subdir
+ self.subproject = subproject
+ self.build_by_default = build_by_default
+ self.for_machine = for_machine
+ self.install = False
+ self.build_always_stale = False
+ self.option_overrides_base: T.Dict[OptionKey, str] = {}
+ self.option_overrides_compiler: T.Dict[OptionKey, str] = {}
+ self.extra_files = [] # type: T.List[File]
+ if not hasattr(self, 'typename'):
+ raise RuntimeError(f'Target type is not set for target class "{type(self).__name__}". This is a bug')
+
+    def __lt__(self, other: object) -> bool:
+        # 'or', not 'and': otherwise a missing get_id attribute would raise
+        # AttributeError here instead of returning NotImplemented.
+        if not hasattr(other, 'get_id') or not callable(other.get_id):
+            return NotImplemented
+        return self.get_id() < other.get_id()
+
+    def __le__(self, other: object) -> bool:
+        if not hasattr(other, 'get_id') or not callable(other.get_id):
+            return NotImplemented
+        return self.get_id() <= other.get_id()
+
+    def __gt__(self, other: object) -> bool:
+        if not hasattr(other, 'get_id') or not callable(other.get_id):
+            return NotImplemented
+        return self.get_id() > other.get_id()
+
+    def __ge__(self, other: object) -> bool:
+        if not hasattr(other, 'get_id') or not callable(other.get_id):
+            return NotImplemented
+        return self.get_id() >= other.get_id()
+
+ def get_default_install_dir(self, env: environment.Environment) -> str:
+ raise NotImplementedError
+
+ def get_install_dir(self, environment: environment.Environment) -> T.Tuple[T.Any, bool]:
+ # Find the installation directory.
+ default_install_dir = self.get_default_install_dir(environment)
+ outdirs = self.get_custom_install_dir()
+ if outdirs[0] is not None and outdirs[0] != default_install_dir and outdirs[0] is not True:
+ # Either the value is set to a non-default value, or is set to
+ # False (which means we want this specific output out of many
+ # outputs to not be installed).
+ custom_install_dir = True
+ else:
+ custom_install_dir = False
+ outdirs[0] = default_install_dir
+ return outdirs, custom_install_dir
+
+ def get_basename(self) -> str:
+ return self.name
+
+ def get_subdir(self) -> str:
+ return self.subdir
+
+ def get_typename(self) -> str:
+ return self.typename
+
+ @staticmethod
+ def _get_id_hash(target_id):
+ # We don't really need cryptographic security here.
+ # Small-digest hash function with unlikely collision is good enough.
+ h = hashlib.sha256()
+ h.update(target_id.encode(encoding='utf-8', errors='replace'))
+ # This ID should be case-insensitive and should work in Visual Studio,
+ # e.g. it should not start with leading '-'.
+ return h.hexdigest()[:7]
+
+ @staticmethod
+ def construct_id_from_path(subdir: str, name: str, type_suffix: str) -> str:
+ """Construct target ID from subdir, name and type suffix.
+
+ This helper function is made public mostly for tests."""
+ # This ID must also be a valid file name on all OSs.
+ # It should also avoid shell metacharacters for obvious
+ # reasons. '@' is not used as often as '_' in source code names.
+ # In case of collisions consider using checksums.
+ # FIXME replace with assert when slash in names is prohibited
+ name_part = name.replace('/', '@').replace('\\', '@')
+ assert not has_path_sep(type_suffix)
+ my_id = name_part + type_suffix
+ if subdir:
+ subdir_part = Target._get_id_hash(subdir)
+            # Preserve my_id for better debuggability.
+ return subdir_part + '@@' + my_id
+ return my_id
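+    # Example: construct_id_from_path('sub/dir', 'foo', '@exe') returns
+    # '<7-digit hash of "sub/dir">@@foo@exe'; with an empty subdir it is
+    # simply 'foo@exe'.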
+
+ def get_id(self) -> str:
+ return self.construct_id_from_path(
+ self.subdir, self.name, self.type_suffix())
+
+ def process_kwargs_base(self, kwargs: T.Dict[str, T.Any]) -> None:
+ if 'build_by_default' in kwargs:
+ self.build_by_default = kwargs['build_by_default']
+ if not isinstance(self.build_by_default, bool):
+ raise InvalidArguments('build_by_default must be a boolean value.')
+ elif kwargs.get('install', False):
+ # For backward compatibility, if build_by_default is not explicitly
+ # set, use the value of 'install' if it's enabled.
+ self.build_by_default = True
+
+ option_overrides = self.parse_overrides(kwargs)
+
+ for k, v in option_overrides.items():
+ if k.lang:
+ self.option_overrides_compiler[k.evolve(machine=self.for_machine)] = v
+ continue
+ self.option_overrides_base[k] = v
+
+ @staticmethod
+ def parse_overrides(kwargs: T.Dict[str, T.Any]) -> T.Dict[OptionKey, str]:
+ result: T.Dict[OptionKey, str] = {}
+ overrides = stringlistify(kwargs.get('override_options', []))
+ for o in overrides:
+ if '=' not in o:
+ raise InvalidArguments('Overrides must be of form "key=value"')
+ k, v = o.split('=', 1)
+ key = OptionKey.from_string(k.strip())
+ v = v.strip()
+ result[key] = v
+ return result
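+    # E.g. {'override_options': ['werror=true']} parses to
+    # {OptionKey('werror'): 'true'}; language-specific keys such as 'c_std=c99'
+    # additionally get their language component split out by OptionKey.from_string.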
+
+ def is_linkable_target(self) -> bool:
+ return False
+
+ def get_outputs(self) -> T.List[str]:
+ return []
+
+ def should_install(self) -> bool:
+ return False
+
+class BuildTarget(Target):
+ known_kwargs = known_build_target_kwargs
+
+ def __init__(self, name: str, subdir: str, subproject: str, for_machine: MachineChoice,
+ sources: T.List['SourceOutputs'], objects, environment: environment.Environment, kwargs):
+ super().__init__(name, subdir, subproject, True, for_machine)
+ unity_opt = environment.coredata.get_option(OptionKey('unity'))
+ self.is_unity = unity_opt == 'on' or (unity_opt == 'subprojects' and subproject != '')
+ self.environment = environment
+ self.compilers = OrderedDict() # type: OrderedDict[str, Compiler]
+ self.objects = []
+ self.external_deps = []
+ self.include_dirs = []
+ self.link_language = kwargs.get('link_language')
+ self.link_targets: T.List[BuildTarget] = []
+ self.link_whole_targets = []
+ self.link_depends = []
+ self.added_deps = set()
+ self.name_prefix_set = False
+ self.name_suffix_set = False
+ self.filename = 'no_name'
+ # The list of all files outputted by this target. Useful in cases such
+ # as Vala which generates .vapi and .h besides the compiled output.
+ self.outputs = [self.filename]
+ self.need_install = False
+ self.pch = {}
+ self.extra_args: T.Dict[str, T.List['FileOrString']] = {}
+ self.sources: T.List[File] = []
+ self.generated: T.List[T.Union[GeneratedList, CustomTarget, CustomTargetIndex]] = []
+ self.d_features = {}
+ self.pic = False
+ self.pie = False
+ # Track build_rpath entries so we can remove them at install time
+ self.rpath_dirs_to_remove: T.Set[bytes] = set()
+ self.process_sourcelist(sources)
+ # Objects can be:
+ # 1. Pre-existing objects provided by the user with the `objects:` kwarg
+ # 2. Compiled objects created by and extracted from another target
+ self.process_objectlist(objects)
+ self.process_kwargs(kwargs, environment)
+ self.check_unknown_kwargs(kwargs)
+ self.process_compilers()
+        # Check link_whole_targets (the list), not the link_whole method,
+        # which is always truthy.
+        if not any([self.sources, self.generated, self.objects, self.link_whole_targets]):
+ raise InvalidArguments(f'Build target {name} has no sources.')
+ self.process_compilers_late()
+ self.validate_sources()
+ self.validate_install(environment)
+ self.check_module_linking()
+
+ def __repr__(self):
+ repr_str = "<{0} {1}: {2}>"
+ return repr_str.format(self.__class__.__name__, self.get_id(), self.filename)
+
+ def __str__(self):
+ return f"{self.name}"
+
+ def validate_install(self, environment):
+ if self.for_machine is MachineChoice.BUILD and self.need_install:
+ if environment.is_cross_build():
+ raise InvalidArguments('Tried to install a target for the build machine in a cross build.')
+ else:
+ mlog.warning('Installing target build for the build machine. This will fail in a cross build.')
+
+ def check_unknown_kwargs(self, kwargs):
+ # Override this method in derived classes that have more
+ # keywords.
+ self.check_unknown_kwargs_int(kwargs, self.known_kwargs)
+
+ def check_unknown_kwargs_int(self, kwargs, known_kwargs):
+ unknowns = []
+ for k in kwargs:
+ if k not in known_kwargs:
+ unknowns.append(k)
+ if len(unknowns) > 0:
+ mlog.warning('Unknown keyword argument(s) in target {}: {}.'.format(self.name, ', '.join(unknowns)))
+
+ def process_objectlist(self, objects):
+        assert isinstance(objects, list)
+ for s in objects:
+ if isinstance(s, (str, File, ExtractedObjects)):
+ self.objects.append(s)
+ elif isinstance(s, (GeneratedList, CustomTarget)):
+ msg = 'Generated files are not allowed in the \'objects\' kwarg ' + \
+ f'for target {self.name!r}.\nIt is meant only for ' + \
+ 'pre-built object files that are shipped with the\nsource ' + \
+ 'tree. Try adding it in the list of sources.'
+ raise InvalidArguments(msg)
+ else:
+ raise InvalidArguments(f'Bad object of type {type(s).__name__!r} in target {self.name!r}.')
+
+ def process_sourcelist(self, sources: T.List['SourceOutputs']) -> None:
+ """Split sources into generated and static sources.
+
+ Sources can be:
+ 1. Pre-existing source files in the source tree (static)
+ 2. Pre-existing sources generated by configure_file in the build tree.
+ (static as they are only regenerated if meson itself is regenerated)
+ 3. Sources files generated by another target or a Generator (generated)
+ """
+ added_sources: T.Set[File] = set() # If the same source is defined multiple times, use it only once.
+ for s in sources:
+ if isinstance(s, File):
+ if s not in added_sources:
+ self.sources.append(s)
+ added_sources.add(s)
+ elif isinstance(s, (CustomTarget, CustomTargetIndex, GeneratedList)):
+ self.generated.append(s)
+
+ @staticmethod
+ def can_compile_remove_sources(compiler: 'Compiler', sources: T.List['FileOrString']) -> bool:
+ removed = False
+ for s in sources[:]:
+ if compiler.can_compile(s):
+ sources.remove(s)
+ removed = True
+ return removed
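+    # E.g. given a C compiler and sources ['a.c', 'readme.txt'], this removes
+    # 'a.c' from the list in place and returns True, leaving ['readme.txt'].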
+
+ def process_compilers_late(self):
+ """Processes additional compilers after kwargs have been evaluated.
+
+ This can add extra compilers that might be required by keyword
+ arguments, such as link_with or dependencies. It will also try to guess
+ which compiler to use if one hasn't been selected already.
+ """
+ # Populate list of compilers
+ compilers = self.environment.coredata.compilers[self.for_machine]
+
+ # did user override clink_langs for this target?
+ link_langs = [self.link_language] if self.link_language else clink_langs
+
+ # If this library is linked against another library we need to consider
+ # the languages of those libraries as well.
+ if self.link_targets or self.link_whole_targets:
+ extra = set()
+ for t in itertools.chain(self.link_targets, self.link_whole_targets):
+                if isinstance(t, (CustomTarget, CustomTargetIndex)):
+ continue # We can't know anything about these.
+ for name, compiler in t.compilers.items():
+ if name in link_langs:
+ extra.add((name, compiler))
+ for name, compiler in sorted(extra, key=lambda p: sort_clink(p[0])):
+ self.compilers[name] = compiler
+
+ if not self.compilers:
+ # No source files or parent targets, target consists of only object
+ # files of unknown origin. Just add the first clink compiler
+ # that we have and hope that it can link these objects
+ for lang in link_langs:
+ if lang in compilers:
+ self.compilers[lang] = compilers[lang]
+ break
+
+ def process_compilers(self):
+ '''
+ Populate self.compilers, which is the list of compilers that this
+ target will use for compiling all its sources.
+ We also add compilers that were used by extracted objects to simplify
+ dynamic linker determination.
+ '''
+ if not self.sources and not self.generated and not self.objects:
+ return
+ # Populate list of compilers
+ compilers = self.environment.coredata.compilers[self.for_machine]
+ # Pre-existing sources
+ sources = list(self.sources)
+ # All generated sources
+ for gensrc in self.generated:
+ for s in gensrc.get_outputs():
+ # Generated objects can't be compiled, so don't use them for
+ # compiler detection. If our target only has generated objects,
+ # we will fall back to using the first c-like compiler we find,
+ # which is what we need.
+ if not is_object(s):
+ sources.append(s)
+ for d in self.external_deps:
+ for s in d.sources:
+ if isinstance(s, (str, File)):
+ sources.append(s)
+
+ # Sources that were used to create our extracted objects
+ for o in self.objects:
+ if not isinstance(o, ExtractedObjects):
+ continue
+ for s in o.srclist:
+ # Don't add Vala sources since that will pull in the Vala
+ # compiler even though we will never use it since we are
+ # dealing with compiled C code.
+ if not s.endswith(lang_suffixes['vala']):
+ sources.append(s)
+ if sources:
+ # For each source, try to add one compiler that can compile it.
+ #
+ # If it has a suffix that belongs to a known language, we must have
+ # a compiler for that language.
+ #
+ # Otherwise, it's ok if no compilers can compile it, because users
+ # are expected to be able to add arbitrary non-source files to the
+ # sources list
+ for s in sources:
+ for lang, compiler in compilers.items():
+ if compiler.can_compile(s):
+ if lang not in self.compilers:
+ self.compilers[lang] = compiler
+ break
+ else:
+ if is_known_suffix(s):
+ raise MesonException('No {} machine compiler for "{}"'.
+ format(self.for_machine.get_lower_case_name(), s))
+
+ # Re-sort according to clink_langs
+ self.compilers = OrderedDict(sorted(self.compilers.items(),
+ key=lambda t: sort_clink(t[0])))
+
+ # If all our sources are Vala or Cython, our target also needs the C
+ # compiler, but it won't get added above since there are no C sources.
+ if ('vala' in self.compilers or 'cython' in self.compilers) and 'c' not in self.compilers:
+ self.compilers['c'] = compilers['c']
+
+ def validate_sources(self):
+ if not self.sources:
+ return
+ for lang in ('cs', 'java'):
+ if lang in self.compilers:
+ check_sources = list(self.sources)
+ compiler = self.compilers[lang]
+ if not self.can_compile_remove_sources(compiler, check_sources):
+ raise InvalidArguments(f'No {lang} sources found in target {self.name!r}')
+ if check_sources:
+ m = '{0} targets can only contain {0} files:\n'.format(lang.capitalize())
+ m += '\n'.join([repr(c) for c in check_sources])
+ raise InvalidArguments(m)
+ # CSharp and Java targets can't contain any other file types
+ assert len(self.compilers) == 1
+ return
+
+ def process_link_depends(self, sources, environment):
+ """Process the link_depends keyword argument.
+
+ This is designed to handle strings, Files, and the output of Custom
+ Targets. Notably it doesn't handle generator() returned objects, since
+ adding them as a link depends would inherently cause them to be
+ generated twice, since the output needs to be passed to the ld_args and
+ link_depends.
+ """
+ sources = listify(sources)
+ for s in sources:
+ if isinstance(s, File):
+ self.link_depends.append(s)
+ elif isinstance(s, str):
+ self.link_depends.append(
+ File.from_source_file(environment.source_dir, self.subdir, s))
+ elif hasattr(s, 'get_outputs'):
+ self.link_depends.extend(
+ [File.from_built_file(s.get_subdir(), p) for p in s.get_outputs()])
+ else:
+ raise InvalidArguments(
+ 'Link_depends arguments must be strings, Files, '
+ 'or a Custom Target, or lists thereof.')
+
+ def get_original_kwargs(self):
+ return self.kwargs
+
+ def copy_kwargs(self, kwargs):
+ self.kwargs = copy.copy(kwargs)
+ for k, v in self.kwargs.items():
+ if isinstance(v, list):
+ self.kwargs[k] = listify(v, flatten=True)
+ for t in ['dependencies', 'link_with', 'include_directories', 'sources']:
+ if t in self.kwargs:
+ self.kwargs[t] = listify(self.kwargs[t], flatten=True)
+
+ def extract_objects(self, srclist: T.List[FileOrString]) -> ExtractedObjects:
+ obj_src = []
+ sources_set = set(self.sources)
+ for src in srclist:
+ if isinstance(src, str):
+ src = File(False, self.subdir, src)
+ elif isinstance(src, File):
+ FeatureNew.single_use('File argument for extract_objects', '0.50.0', self.subproject)
+ else:
+ raise MesonException(f'Object extraction arguments must be strings or Files (got {type(src).__name__}).')
+ # FIXME: It could be a generated source
+ if src not in sources_set:
+ raise MesonException(f'Tried to extract unknown source {src}.')
+ obj_src.append(src)
+ return ExtractedObjects(self, obj_src)
+
+ def extract_all_objects(self, recursive: bool = True) -> ExtractedObjects:
+ return ExtractedObjects(self, self.sources, self.generated, self.objects,
+ recursive)
+
+ def get_all_link_deps(self):
+ return self.get_transitive_link_deps()
+
+ @lru_cache(maxsize=None)
+ def get_transitive_link_deps(self) -> 'ImmutableListProtocol[Target]':
+ result: T.List[Target] = []
+ for i in self.link_targets:
+ result += i.get_all_link_deps()
+ return result
+
+ def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]:
+ return self.get_transitive_link_deps_mapping(prefix, environment)
+
+ @lru_cache(maxsize=None)
+ def get_transitive_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]:
+ result: T.Dict[str, str] = {}
+ for i in self.link_targets:
+ mapping = i.get_link_deps_mapping(prefix, environment)
+ # We are merging two dictionaries while keeping the earlier one dominant.
+ result_tmp = mapping.copy()
+ result_tmp.update(result)
+ result = result_tmp
+ return result
+
+ @lru_cache(maxsize=None)
+ def get_link_dep_subdirs(self) -> 'ImmutableSetProtocol[str]':
+ result: OrderedSet[str] = OrderedSet()
+ for i in self.link_targets:
+ if not isinstance(i, StaticLibrary):
+ result.add(i.get_subdir())
+ result.update(i.get_link_dep_subdirs())
+ return result
+
+ def get_default_install_dir(self, environment: environment.Environment) -> str:
+ return environment.get_libdir()
+
+ def get_custom_install_dir(self):
+ return self.install_dir
+
+ def get_custom_install_mode(self):
+ return self.install_mode
+
+ def process_kwargs(self, kwargs, environment):
+ self.process_kwargs_base(kwargs)
+ self.copy_kwargs(kwargs)
+ kwargs.get('modules', [])
+ self.need_install = kwargs.get('install', self.need_install)
+ llist = extract_as_list(kwargs, 'link_with')
+ for linktarget in llist:
+ if isinstance(linktarget, dependencies.ExternalLibrary):
+ raise MesonException(textwrap.dedent('''\
+ An external library was used in link_with keyword argument, which
+ is reserved for libraries built as part of this project. External
+ libraries must be passed using the dependencies keyword argument
+ instead, because they are conceptually "external dependencies",
+ just like those detected with the dependency() function.\
+ '''))
+ self.link(linktarget)
+ lwhole = extract_as_list(kwargs, 'link_whole')
+ for linktarget in lwhole:
+ self.link_whole(linktarget)
+
+ c_pchlist, cpp_pchlist, clist, cpplist, cudalist, cslist, valalist, objclist, objcpplist, fortranlist, rustlist \
+ = [extract_as_list(kwargs, c) for c in ['c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cuda_args', 'cs_args', 'vala_args', 'objc_args', 'objcpp_args', 'fortran_args', 'rust_args']]
+
+ self.add_pch('c', c_pchlist)
+ self.add_pch('cpp', cpp_pchlist)
+ compiler_args = {'c': clist, 'cpp': cpplist, 'cuda': cudalist, 'cs': cslist, 'vala': valalist, 'objc': objclist, 'objcpp': objcpplist,
+ 'fortran': fortranlist, 'rust': rustlist
+ }
+ for key, value in compiler_args.items():
+ self.add_compiler_args(key, value)
+
+ if not isinstance(self, Executable) or 'export_dynamic' in kwargs:
+ self.vala_header = kwargs.get('vala_header', self.name + '.h')
+ self.vala_vapi = kwargs.get('vala_vapi', self.name + '.vapi')
+ self.vala_gir = kwargs.get('vala_gir', None)
+
+ dlist = stringlistify(kwargs.get('d_args', []))
+ self.add_compiler_args('d', dlist)
+ dfeatures = dict()
+ dfeature_unittest = kwargs.get('d_unittest', False)
+ if dfeature_unittest:
+ dfeatures['unittest'] = dfeature_unittest
+ dfeature_versions = kwargs.get('d_module_versions', [])
+ if dfeature_versions:
+ dfeatures['versions'] = dfeature_versions
+ dfeature_debug = kwargs.get('d_debug', [])
+ if dfeature_debug:
+ dfeatures['debug'] = dfeature_debug
+ if 'd_import_dirs' in kwargs:
+ dfeature_import_dirs = extract_as_list(kwargs, 'd_import_dirs')
+ for d in dfeature_import_dirs:
+ if not isinstance(d, IncludeDirs):
+ raise InvalidArguments('Arguments to d_import_dirs must be include_directories.')
+ dfeatures['import_dirs'] = dfeature_import_dirs
+ if dfeatures:
+ self.d_features = dfeatures
+
+ self.link_args = extract_as_list(kwargs, 'link_args')
+ for i in self.link_args:
+ if not isinstance(i, str):
+ raise InvalidArguments('Link_args arguments must be strings.')
+ for l in self.link_args:
+ if '-Wl,-rpath' in l or l.startswith('-rpath'):
+ mlog.warning(textwrap.dedent('''\
+ Please do not define rpath with a linker argument, use install_rpath
+ or build_rpath properties instead.
+ This will become a hard error in a future Meson release.\
+ '''))
+ self.process_link_depends(kwargs.get('link_depends', []), environment)
+ # Target-specific include dirs must be added BEFORE include dirs from
+ # internal deps (added inside self.add_deps()) to override them.
+ inclist = extract_as_list(kwargs, 'include_directories')
+ self.add_include_dirs(inclist)
+ # Add dependencies (which also have include_directories)
+ deplist = extract_as_list(kwargs, 'dependencies')
+ self.add_deps(deplist)
+ # If an item in this list is False, the output corresponding to
+ # the list index of that item will not be installed
+ self.install_dir = typeslistify(kwargs.get('install_dir', [None]),
+ (str, bool))
+ self.install_mode = kwargs.get('install_mode', None)
+ main_class = kwargs.get('main_class', '')
+ if not isinstance(main_class, str):
+ raise InvalidArguments('Main class must be a string')
+ self.main_class = main_class
+ if isinstance(self, Executable):
+ # This kwarg is deprecated. A value of None means that the kwarg
+ # was not specified and win_subsystem should be used instead.
+ self.gui_app = None
+ if 'gui_app' in kwargs:
+ if 'win_subsystem' in kwargs:
+ raise InvalidArguments('Can specify only gui_app or win_subsystem for a target, not both.')
+ self.gui_app = kwargs['gui_app']
+ if not isinstance(self.gui_app, bool):
+ raise InvalidArguments('Argument gui_app must be boolean.')
+ self.win_subsystem = self.validate_win_subsystem(kwargs.get('win_subsystem', 'console'))
+ elif 'gui_app' in kwargs:
+ raise InvalidArguments('Argument gui_app can only be used on executables.')
+ elif 'win_subsystem' in kwargs:
+ raise InvalidArguments('Argument win_subsystem can only be used on executables.')
+ extra_files = extract_as_list(kwargs, 'extra_files')
+ for i in extra_files:
+ assert isinstance(i, File)
+ trial = os.path.join(environment.get_source_dir(), i.subdir, i.fname)
+ if not os.path.isfile(trial):
+ raise InvalidArguments(f'Tried to add non-existing extra file {i}.')
+ self.extra_files = extra_files
+ self.install_rpath: str = kwargs.get('install_rpath', '')
+ if not isinstance(self.install_rpath, str):
+ raise InvalidArguments('Install_rpath is not a string.')
+ self.build_rpath = kwargs.get('build_rpath', '')
+ if not isinstance(self.build_rpath, str):
+ raise InvalidArguments('Build_rpath is not a string.')
+ resources = extract_as_list(kwargs, 'resources')
+ for r in resources:
+ if not isinstance(r, str):
+ raise InvalidArguments('Resource argument is not a string.')
+ trial = os.path.join(environment.get_source_dir(), self.subdir, r)
+ if not os.path.isfile(trial):
+ raise InvalidArguments(f'Tried to add non-existing resource {r}.')
+ self.resources = resources
+ if 'name_prefix' in kwargs:
+ name_prefix = kwargs['name_prefix']
+ if isinstance(name_prefix, list):
+ if name_prefix:
+ raise InvalidArguments('name_prefix array must be empty to signify default.')
+ else:
+ if not isinstance(name_prefix, str):
+ raise InvalidArguments('name_prefix must be a string.')
+ self.prefix = name_prefix
+ self.name_prefix_set = True
+ if 'name_suffix' in kwargs:
+ name_suffix = kwargs['name_suffix']
+ if isinstance(name_suffix, list):
+ if name_suffix:
+ raise InvalidArguments('name_suffix array must be empty to signify default.')
+ else:
+ if not isinstance(name_suffix, str):
+ raise InvalidArguments('name_suffix must be a string.')
+ if name_suffix == '':
+ raise InvalidArguments('name_suffix should not be an empty string. '
+ 'If you want meson to use the default behaviour '
+ 'for each platform, pass `[]` (an empty array).')
+ self.suffix = name_suffix
+ self.name_suffix_set = True
+ if isinstance(self, StaticLibrary):
+ # You can't disable PIC on OS X. The compiler ignores -fno-PIC.
+ # PIC is always on for Windows (all code is position-independent
+ # since library loading is done differently)
+ m = self.environment.machines[self.for_machine]
+ if m.is_darwin() or m.is_windows():
+ self.pic = True
+ else:
+ self.pic = self._extract_pic_pie(kwargs, 'pic', environment, 'b_staticpic')
+ if isinstance(self, Executable) or (isinstance(self, StaticLibrary) and not self.pic):
+ # Executables must be PIE on Android
+ if self.environment.machines[self.for_machine].is_android():
+ self.pie = True
+ else:
+ self.pie = self._extract_pic_pie(kwargs, 'pie', environment, 'b_pie')
+ self.implicit_include_directories = kwargs.get('implicit_include_directories', True)
+ if not isinstance(self.implicit_include_directories, bool):
+ raise InvalidArguments('Implicit_include_directories must be a boolean.')
+ self.gnu_symbol_visibility = kwargs.get('gnu_symbol_visibility', '')
+ if not isinstance(self.gnu_symbol_visibility, str):
+ raise InvalidArguments('GNU symbol visibility must be a string.')
+ if self.gnu_symbol_visibility != '':
+ permitted = ['default', 'internal', 'hidden', 'protected', 'inlineshidden']
+ if self.gnu_symbol_visibility not in permitted:
+ raise InvalidArguments('GNU symbol visibility arg {} not one of: {}'.format(self.gnu_symbol_visibility, ', '.join(permitted)))
+
+ def validate_win_subsystem(self, value: str) -> str:
+ value = value.lower()
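+ # Valid values per the pattern below include e.g. 'console', 'windows'
+ # or 'windows,6.0' (an optional ',major[.minor]' version suffix).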
+ if re.fullmatch(r'(boot_application|console|efi_application|efi_boot_service_driver|efi_rom|efi_runtime_driver|native|posix|windows)(,\d+(\.\d+)?)?', value) is None:
+ raise InvalidArguments(f'Invalid value for win_subsystem: {value}.')
+ return value
+
+ def _extract_pic_pie(self, kwargs, arg: str, environment, option: str):
+ # Check if we have -fPIC, -fpic, -fPIE, or -fpie in cflags
+ all_flags = self.extra_args['c'] + self.extra_args['cpp']
+ if '-f' + arg.lower() in all_flags or '-f' + arg.upper() in all_flags:
+ mlog.warning(f"Use the '{arg}' kwarg instead of passing '-f{arg}' manually to {self.name!r}")
+ return True
+
+ k = OptionKey(option)
+ if arg in kwargs:
+ val = kwargs[arg]
+ elif k in environment.coredata.options:
+ val = environment.coredata.options[k].value
+ else:
+ val = False
+
+ if not isinstance(val, bool):
+ raise InvalidArguments(f'Argument {arg} to {self.name!r} must be boolean')
+ return val
+
+ def get_filename(self):
+ return self.filename
+
+ def get_outputs(self) -> T.List[str]:
+ return self.outputs
+
+ def get_extra_args(self, language):
+ return self.extra_args.get(language, [])
+
+ def get_dependencies(self, exclude=None):
+ transitive_deps = []
+ if exclude is None:
+ exclude = []
+ for t in itertools.chain(self.link_targets, self.link_whole_targets):
+ if t in transitive_deps or t in exclude:
+ continue
+ transitive_deps.append(t)
+ if isinstance(t, StaticLibrary):
+ transitive_deps += t.get_dependencies(transitive_deps + exclude)
+ return transitive_deps
+
+ def get_source_subdir(self):
+ return self.subdir
+
+ def get_sources(self):
+ return self.sources
+
+ def get_objects(self):
+ return self.objects
+
+ def get_generated_sources(self):
+ return self.generated
+
+ def should_install(self) -> bool:
+ return self.need_install
+
+ def has_pch(self):
+ return len(self.pch) > 0
+
+ def get_pch(self, language):
+ try:
+ return self.pch[language]
+ except KeyError:
+ return []
+
+ def get_include_dirs(self):
+ return self.include_dirs
+
+ def add_deps(self, deps):
+ deps = listify(deps)
+ for dep in deps:
+ if dep in self.added_deps:
+ continue
+ if isinstance(dep, dependencies.InternalDependency):
+ # Those parts that are internal.
+ self.process_sourcelist(dep.sources)
+ self.add_include_dirs(dep.include_directories, dep.get_include_type())
+ for l in dep.libraries:
+ self.link(l)
+ for l in dep.whole_libraries:
+ self.link_whole(l)
+ if dep.get_compile_args() or dep.get_link_args():
+ # Those parts that are external.
+ extpart = dependencies.InternalDependency('undefined',
+ [],
+ dep.get_compile_args(),
+ dep.get_link_args(),
+ [], [], [], [], {})
+ self.external_deps.append(extpart)
+ # Deps of deps.
+ self.add_deps(dep.ext_deps)
+ elif isinstance(dep, dependencies.Dependency):
+ if dep not in self.external_deps:
+ self.external_deps.append(dep)
+ self.process_sourcelist(dep.get_sources())
+ self.add_deps(dep.ext_deps)
+ elif isinstance(dep, BuildTarget):
+ raise InvalidArguments('''Tried to use a build target as a dependency.
+You probably should put it in link_with instead.''')
+ else:
+ # This is a bit of a hack. We do not want Build to know anything
+ # about the interpreter so we can't import it and use isinstance.
+ # This should be reliable enough.
+ if hasattr(dep, 'project_args_frozen') or hasattr(dep, 'global_args_frozen'):
+ raise InvalidArguments('Tried to use subproject object as a dependency.\n'
+ 'You probably wanted to use a dependency declared in it instead.\n'
+ 'Access it by calling get_variable() on the subproject object.')
+ raise InvalidArguments(f'Argument is of an unacceptable type {type(dep).__name__!r}.\nMust be '
+ 'either an external dependency (returned by find_library() or '
+ 'dependency()) or an internal dependency (returned by '
+ 'declare_dependency()).')
+ self.added_deps.add(dep)
+
+ def get_external_deps(self):
+ return self.external_deps
+
+ def is_internal(self):
+ return isinstance(self, StaticLibrary) and not self.need_install
+
+ def link(self, target):
+ for t in listify(target):
+ if isinstance(self, StaticLibrary) and self.need_install:
+ if isinstance(t, (CustomTarget, CustomTargetIndex)):
+ if not t.should_install():
+ mlog.warning(f'Tried to link the installed static library target {self.name} with a '
+ 'custom target that is not installed. This might cause problems '
+ 'when you try to use this static library.')
+ elif t.is_internal():
+ # When we're a static library and we link_with to an
+ # internal/convenience library, promote to link_whole.
+ return self.link_whole(t)
+ if not isinstance(t, (Target, CustomTargetIndex)):
+ raise InvalidArguments(f'{t!r} is not a target.')
+ if not t.is_linkable_target():
+ raise InvalidArguments(f"Link target '{t!s}' is not linkable.")
+ if isinstance(self, SharedLibrary) and isinstance(t, StaticLibrary) and not t.pic:
+ msg = f"Can't link non-PIC static library {t.name!r} into shared library {self.name!r}. "
+ msg += "Use the 'pic' option to static_library to build with PIC."
+ raise InvalidArguments(msg)
+ if self.for_machine is not t.for_machine:
+ msg = f'Tried to mix libraries for machines {self.for_machine} and {t.for_machine} in target {self.name!r}'
+ if self.environment.is_cross_build():
+ raise InvalidArguments(msg + ' This is not possible in a cross build.')
+ else:
+ mlog.warning(msg + ' This will fail in a cross build.')
+ self.link_targets.append(t)
+
+ def link_whole(self, target):
+ for t in listify(target):
+ if isinstance(t, (CustomTarget, CustomTargetIndex)):
+ if not t.is_linkable_target():
+ raise InvalidArguments(f'Custom target {t!r} is not linkable.')
+ if not t.get_filename().endswith('.a'):
+ raise InvalidArguments('Can only link_whole custom targets that are .a archives.')
+ if isinstance(self, StaticLibrary):
+ # FIXME: We could extract the .a archive to get object files
+ raise InvalidArguments('Cannot link_whole a custom target into a static library')
+ elif not isinstance(t, StaticLibrary):
+ raise InvalidArguments(f'{t!r} is not a static library.')
+ elif isinstance(self, SharedLibrary) and not t.pic:
+ msg = f"Can't link non-PIC static library {t.name!r} into shared library {self.name!r}. "
+ msg += "Use the 'pic' option to static_library to build with PIC."
+ raise InvalidArguments(msg)
+ if self.for_machine is not t.for_machine:
+ msg = f'Tried to mix libraries for machines {self.for_machine} and {t.for_machine} in target {self.name!r}'
+ if self.environment.is_cross_build():
+ raise InvalidArguments(msg + ' This is not possible in a cross build.')
+ else:
+ mlog.warning(msg + ' This will fail in a cross build.')
+ if isinstance(self, StaticLibrary):
+ # When we're a static library and we link_whole: to another static
+ # library, we need to add that target's objects to ourselves.
+ self.objects += t.extract_all_objects_recurse()
+ self.link_whole_targets.append(t)
+
+ def extract_all_objects_recurse(self):
+ objs = [self.extract_all_objects()]
+ for t in self.link_targets:
+ if t.is_internal():
+ objs += t.extract_all_objects_recurse()
+ return objs
+
+ def add_pch(self, language, pchlist):
+ if not pchlist:
+ return
+ elif len(pchlist) == 1:
+ if not environment.is_header(pchlist[0]):
+ raise InvalidArguments(f'PCH argument {pchlist[0]} is not a header.')
+ elif len(pchlist) == 2:
+ if environment.is_header(pchlist[0]):
+ if not environment.is_source(pchlist[1]):
+ raise InvalidArguments('PCH definition must contain one header and at most one source.')
+ elif environment.is_source(pchlist[0]):
+ if not environment.is_header(pchlist[1]):
+ raise InvalidArguments('PCH definition must contain one header and at most one source.')
+ pchlist = [pchlist[1], pchlist[0]]
+ else:
+ raise InvalidArguments(f'PCH argument {pchlist[0]} is of unknown type.')
+
+ if os.path.dirname(pchlist[0]) != os.path.dirname(pchlist[1]):
+ raise InvalidArguments('PCH files must be stored in the same folder.')
+
+ mlog.warning('PCH source files are deprecated, only a single header file should be used.')
+ elif len(pchlist) > 2:
+ raise InvalidArguments('PCH definition may have a maximum of 2 files.')
+ for f in pchlist:
+ if not isinstance(f, str):
+ raise MesonException('PCH arguments must be strings.')
+ if not os.path.isfile(os.path.join(self.environment.source_dir, self.subdir, f)):
+ raise MesonException(f'File {f} does not exist.')
+ self.pch[language] = pchlist
+
+ def add_include_dirs(self, args, set_is_system: T.Optional[str] = None):
+ ids = []
+ for a in args:
+ if not isinstance(a, IncludeDirs):
+ raise InvalidArguments('Include directory to be added is not an include directory object.')
+ ids.append(a)
+ if set_is_system is None:
+ set_is_system = 'preserve'
+ if set_is_system != 'preserve':
+ is_system = set_is_system == 'system'
+ ids = [IncludeDirs(x.get_curdir(), x.get_incdirs(), is_system, x.get_extra_build_dirs()) for x in ids]
+ self.include_dirs += ids
+
+ def add_compiler_args(self, language: str, args: T.List['FileOrString']) -> None:
+ args = listify(args)
+ for a in args:
+ if not isinstance(a, (str, File)):
+ raise InvalidArguments('Compiler arguments must be strings or files.')
+ if language in self.extra_args:
+ self.extra_args[language] += args
+ else:
+ self.extra_args[language] = args
+
+ def get_aliases(self) -> T.Dict[str, str]:
+ return {}
+
+ def get_langs_used_by_deps(self) -> T.List[str]:
+ '''
+ Sometimes you want to link to a C++ library that exports C API, which
+ means the linker must link in the C++ stdlib, and we must use a C++
+ compiler for linking. The same is also applicable for objc/objc++, etc,
+ so we can keep using clink_langs for the priority order.
+
+ See: https://github.com/mesonbuild/meson/issues/1653
+ '''
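+ # e.g. a C executable linking an internal C++ static library picks up
+ # 'cpp' here, which later forces a C++-capable link step (illustrative
+ # case).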
+ langs = [] # type: T.List[str]
+
+ # Check if any of the external libraries were written in this language
+ for dep in self.external_deps:
+ if dep.language is None:
+ continue
+ if dep.language not in langs:
+ langs.append(dep.language)
+ # Check if any of the internal libraries this target links to were
+ # written in this language
+ for link_target in itertools.chain(self.link_targets, self.link_whole_targets):
+ if isinstance(link_target, (CustomTarget, CustomTargetIndex)):
+ continue
+ for language in link_target.compilers:
+ if language not in langs:
+ langs.append(language)
+
+ return langs
+
+ def get_prelinker(self):
+ all_compilers = self.environment.coredata.compilers[self.for_machine]
+ if self.link_language:
+ comp = all_compilers[self.link_language]
+ return comp
+ for l in clink_langs:
+ if l in self.compilers:
+ try:
+ prelinker = all_compilers[l]
+ except KeyError:
+ raise MesonException(
+ f'Could not get a prelinker for build target {self.name!r}. '
+ f'Requires a compiler for language "{l}", but that is not '
+ 'a project language.')
+ return prelinker
+ raise MesonException(f'Could not determine prelinker for {self.name!r}.')
+
+ def get_clink_dynamic_linker_and_stdlibs(self):
+ '''
+ We use the order of languages in `clink_langs` to determine which
+ linker to use in case the target has sources compiled with multiple
+ compilers. All languages other than those in this list have their own
+ linker.
+ Note that Vala outputs C code, so Vala sources can use any linker
+ that can link compiled C. We don't actually need to add an exception
+ for Vala here because of that.
+ '''
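+ # Sketch of the outcome: with both 'c' and 'cpp' in play, 'cpp' ranks
+ # higher in clink_langs, so the C++ compiler is returned and the
+ # stdlib-only link flags of the other languages are collected as
+ # stdlib_args.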
+ # Populate list of all compilers, not just those being used to compile
+ # sources in this target
+ all_compilers = self.environment.coredata.compilers[self.for_machine]
+
+ # If the user set the link_language, just return that.
+ if self.link_language:
+ comp = all_compilers[self.link_language]
+ return comp, comp.language_stdlib_only_link_flags()
+
+ # Languages used by dependencies
+ dep_langs = self.get_langs_used_by_deps()
+ # Pick a compiler based on the language priority-order
+ for l in clink_langs:
+ if l in self.compilers or l in dep_langs:
+ try:
+ linker = all_compilers[l]
+ except KeyError:
+ raise MesonException(
+ f'Could not get a dynamic linker for build target {self.name!r}. '
+ f'Requires a linker for language "{l}", but that is not '
+ 'a project language.')
+ stdlib_args = []
+ added_languages = set()
+ for dl in itertools.chain(self.compilers, dep_langs):
+ if dl != linker.language:
+ stdlib_args += all_compilers[dl].language_stdlib_only_link_flags()
+ added_languages.add(dl)
+ # Type of var 'linker' is Compiler.
+ # Pretty hard to fix because the return value is passed everywhere
+ return linker, stdlib_args
+
+ raise AssertionError(f'Could not get a dynamic linker for build target {self.name!r}')
+
+ def uses_rust(self) -> bool:
+ """Is this target a rust target."""
+ if self.sources:
+ first_file = self.sources[0]
+ if first_file.fname.endswith('.rs'):
+ return True
+ elif self.generated:
+ if self.generated[0].get_outputs()[0].endswith('.rs'):
+ return True
+ return False
+
+ def get_using_msvc(self):
+ '''
+ Check if the dynamic linker is MSVC. Used by Executable, StaticLibrary,
+ and SharedLibrary for deciding when to use MSVC-specific file naming
+ and debug filenames.
+
+ If at least some code is built with MSVC and the final library is
+ linked with MSVC, we can be sure that some debug info will be
+ generated. We only check the dynamic linker here because the static
+ linker is guaranteed to be of the same type.
+
+ Interesting cases:
+ 1. The Vala compiler outputs C code to be compiled by whatever
+ C compiler we're using, so all objects will still be created by the
+ MSVC compiler.
+ 2. If the target contains only objects, process_compilers guesses and
+ picks the first compiler that smells right.
+ '''
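+ # Note: the linker ids checked below ('link', 'lld-link', 'xilink',
+ # 'optlink') are the MSVC-style linkers; anything else is treated as
+ # non-MSVC for naming purposes.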
+ # Rustc can use msvc style linkers
+ if self.uses_rust():
+ compiler = self.environment.coredata.compilers[self.for_machine]['rust']
+ else:
+ compiler, _ = self.get_clink_dynamic_linker_and_stdlibs()
+ # Mixing many languages with MSVC is not supported yet so ignore stdlibs.
+ return compiler and compiler.get_linker_id() in {'link', 'lld-link', 'xilink', 'optlink'}
+
+ def check_module_linking(self):
+ '''
+ Warn if shared modules are linked with this target (link_with); see #2865.
+ '''
+ for link_target in self.link_targets:
+ if isinstance(link_target, SharedModule):
+ if self.environment.machines[self.for_machine].is_darwin():
+ raise MesonException(
+ 'target links against shared modules. This is not permitted on OSX')
+ else:
+ mlog.warning('target links against shared modules. This '
+ 'is not recommended as it is not supported on some '
+ 'platforms')
+ return
+
+class Generator(HoldableObject):
+ def __init__(self, exe: T.Union['Executable', programs.ExternalProgram],
+ arguments: T.List[str],
+ output: T.List[str],
+ *,
+ depfile: T.Optional[str] = None,
+ capture: bool = False,
+ depends: T.Optional[T.List[T.Union[BuildTarget, 'CustomTarget']]] = None,
+ name: str = 'Generator'):
+ self.exe = exe
+ self.depfile = depfile
+ self.capture = capture
+ self.depends: T.List[T.Union[BuildTarget, 'CustomTarget']] = depends or []
+ self.arglist = arguments
+ self.outputs = output
+ self.name = name
+
+ def __repr__(self) -> str:
+ repr_str = "<{0}: {1}>"
+ return repr_str.format(self.__class__.__name__, self.exe)
+
+ def get_exe(self) -> T.Union['Executable', programs.ExternalProgram]:
+ return self.exe
+
+ def get_base_outnames(self, inname: str) -> T.List[str]:
+ plainname = os.path.basename(inname)
+ basename = os.path.splitext(plainname)[0]
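+ # e.g. inname 'src/foo.c' with outputs ['@BASENAME@.h'] yields ['foo.h']
+ # (illustrative values).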
+ bases = [x.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) for x in self.outputs]
+ return bases
+
+ def get_dep_outname(self, inname: str) -> str:
+ if self.depfile is None:
+ raise InvalidArguments('Tried to get dep name for rule that does not have dependency file defined.')
+ plainname = os.path.basename(inname)
+ basename = os.path.splitext(plainname)[0]
+ return self.depfile.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname)
+
+ def get_arglist(self, inname: str) -> T.List[str]:
+ plainname = os.path.basename(inname)
+ basename = os.path.splitext(plainname)[0]
+ return [x.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) for x in self.arglist]
+
+ @staticmethod
+ def is_parent_path(parent: str, trial: str) -> bool:
+ relpath = pathlib.PurePath(trial).relative_to(parent)
+ return relpath.parts[0] != '..' # For subdirs we can only go "down".
+
+ def process_files(self, files: T.Iterable[T.Union[str, File, 'CustomTarget', 'CustomTargetIndex', 'GeneratedList']],
+ state: T.Union['Interpreter', 'ModuleState'],
+ preserve_path_from: T.Optional[str] = None,
+ extra_args: T.Optional[T.List[str]] = None) -> 'GeneratedList':
+ output = GeneratedList(self, state.subdir, preserve_path_from, extra_args=extra_args if extra_args is not None else [])
+
+ for e in files:
+ if isinstance(e, CustomTarget):
+ output.depends.add(e)
+ if isinstance(e, CustomTargetIndex):
+ output.depends.add(e.target)
+
+ if isinstance(e, (CustomTarget, CustomTargetIndex, GeneratedList)):
+ self.depends.append(e) # BUG: this should go in the GeneratedList object, not this object.
+ fs = [File.from_built_file(state.subdir, f) for f in e.get_outputs()]
+ elif isinstance(e, str):
+ fs = [File.from_source_file(state.environment.source_dir, state.subdir, e)]
+ else:
+ fs = [e]
+
+ for f in fs:
+ if preserve_path_from:
+ abs_f = f.absolute_path(state.environment.source_dir, state.environment.build_dir)
+ if not self.is_parent_path(preserve_path_from, abs_f):
+ raise InvalidArguments('generator.process: When using preserve_path_from, all input files must be in a subdirectory of the given dir.')
+ output.add_file(f, state)
+ return output
+
+
+class GeneratedList(HoldableObject):
+
+ """The output of generator.process."""
+
+ def __init__(self, generator: Generator, subdir: str,
+ preserve_path_from: T.Optional[str],
+ extra_args: T.List[str]):
+ self.generator = generator
+ self.name = generator.exe
+ self.depends: T.Set['CustomTarget'] = set() # Things this target depends on (because e.g. a custom target was used as input)
+ self.subdir = subdir
+ self.infilelist: T.List['File'] = []
+ self.outfilelist: T.List[str] = []
+ self.outmap: T.Dict[File, T.List[str]] = {}
+ self.extra_depends = [] # XXX: Doesn't seem to be used?
+ self.depend_files: T.List[File] = []
+ self.preserve_path_from = preserve_path_from
+ self.extra_args: T.List[str] = extra_args if extra_args is not None else []
+
+ if isinstance(self.generator.exe, programs.ExternalProgram):
+ if not self.generator.exe.found():
+ raise InvalidArguments('Tried to use a not-found external program as a generator')
+ path = self.generator.exe.get_path()
+ if os.path.isabs(path):
+ # Can only add a dependency on an external program which we
+ # know the absolute path of
+ self.depend_files.append(File.from_absolute_file(path))
+
+ def add_preserved_path_segment(self, infile: File, outfiles: T.List[str], state: T.Union['Interpreter', 'ModuleState']) -> T.List[str]:
+ result: T.List[str] = []
+ in_abs = infile.absolute_path(state.environment.source_dir, state.environment.build_dir)
+ assert os.path.isabs(self.preserve_path_from)
+ rel = os.path.relpath(in_abs, self.preserve_path_from)
+ path_segment = os.path.dirname(rel)
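+ # e.g. with preserve_path_from '/src' and infile '/src/sub/foo.c', rel
+ # is 'sub/foo.c' and path_segment 'sub', so the outputs land under
+ # 'sub/' (illustrative paths).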
+ for of in outfiles:
+ result.append(os.path.join(path_segment, of))
+ return result
+
+ def add_file(self, newfile: File, state: T.Union['Interpreter', 'ModuleState']) -> None:
+ self.infilelist.append(newfile)
+ outfiles = self.generator.get_base_outnames(newfile.fname)
+ if self.preserve_path_from:
+ outfiles = self.add_preserved_path_segment(newfile, outfiles, state)
+ self.outfilelist += outfiles
+ self.outmap[newfile] = outfiles
+
+ def get_inputs(self) -> T.List['File']:
+ return self.infilelist
+
+ def get_outputs(self) -> T.List[str]:
+ return self.outfilelist
+
+ def get_outputs_for(self, filename: 'File') -> T.List[str]:
+ return self.outmap[filename]
+
+ def get_generator(self) -> 'Generator':
+ return self.generator
+
+ def get_extra_args(self) -> T.List[str]:
+ return self.extra_args
+
+ def get_subdir(self) -> str:
+ return self.subdir
+
+
+class Executable(BuildTarget):
+ known_kwargs = known_exe_kwargs
+
+ def __init__(self, name: str, subdir: str, subproject: str, for_machine: MachineChoice,
+ sources: T.List[File], objects, environment: environment.Environment, kwargs):
+ self.typename = 'executable'
+ key = OptionKey('b_pie')
+ if 'pie' not in kwargs and key in environment.coredata.options:
+ kwargs['pie'] = environment.coredata.options[key].value
+ super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs)
+ # Unless overridden, executables have no prefix or suffix, except on
+ # Windows and for C#/Mono executables, where the suffix is 'exe'.
+ if not hasattr(self, 'prefix'):
+ self.prefix = ''
+ if not hasattr(self, 'suffix'):
+ machine = environment.machines[for_machine]
+ # Executable for Windows or C#/Mono
+ if machine.is_windows() or machine.is_cygwin() or 'cs' in self.compilers:
+ self.suffix = 'exe'
+ elif machine.system.startswith('wasm') or machine.system == 'emscripten':
+ self.suffix = 'js'
+ elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('arm') or
+ 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('arm')):
+ self.suffix = 'axf'
+ elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('ccrx') or
+ 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('ccrx')):
+ self.suffix = 'abs'
+ elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('xc16')):
+ self.suffix = 'elf'
+ elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('c2000') or
+ 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('c2000')):
+ self.suffix = 'out'
+ else:
+ self.suffix = environment.machines[for_machine].get_exe_suffix()
+ self.filename = self.name
+ if self.suffix:
+ self.filename += '.' + self.suffix
+ self.outputs = [self.filename]
+
+ # The import library this target will generate
+ self.import_filename = None
+ # The import library that Visual Studio would generate (and accept)
+ self.vs_import_filename = None
+ # The import library that GCC would generate (and prefer)
+ self.gcc_import_filename = None
+ # The debugging information file this target will generate
+ self.debug_filename = None
+
+ # Check for export_dynamic
+ self.export_dynamic = False
+ if kwargs.get('export_dynamic'):
+ if not isinstance(kwargs['export_dynamic'], bool):
+ raise InvalidArguments('"export_dynamic" keyword argument must be a boolean')
+ self.export_dynamic = True
+ if kwargs.get('implib'):
+ self.export_dynamic = True
+ if self.export_dynamic and kwargs.get('implib') is False:
+ raise InvalidArguments('"implib" keyword argument must not be false for if "export_dynamic" is true')
+
+ m = environment.machines[for_machine]
+
+ # If using export_dynamic, set the import library name
+ if self.export_dynamic:
+ implib_basename = self.name + '.exe'
+ if not isinstance(kwargs.get('implib', False), bool):
+ implib_basename = kwargs['implib']
+ if m.is_windows() or m.is_cygwin():
+ self.vs_import_filename = f'{implib_basename}.lib'
+ self.gcc_import_filename = f'lib{implib_basename}.a'
+ if self.get_using_msvc():
+ self.import_filename = self.vs_import_filename
+ else:
+ self.import_filename = self.gcc_import_filename
+
+ if m.is_windows() and ('cs' in self.compilers or
+ self.uses_rust() or
+ self.get_using_msvc()):
+ self.debug_filename = self.name + '.pdb'
+
+ # Only linkwithable if using export_dynamic
+ self.is_linkwithable = self.export_dynamic
+
+ # Remember that this exe was returned by `find_program()` through an override
+ self.was_returned_by_find_program = False
+
+ def get_default_install_dir(self, environment: environment.Environment) -> str:
+ return environment.get_bindir()
+
+ def description(self):
+ '''Human friendly description of the executable'''
+ return self.name
+
+ def type_suffix(self):
+ return "@exe"
+
+ def get_import_filename(self):
+ """
+ The name of the import library that will be generated by the compiler
+
+ Returns None if there is no import library required for this platform
+ """
+ return self.import_filename
+
+ def get_import_filenameslist(self):
+ if self.import_filename:
+ return [self.vs_import_filename, self.gcc_import_filename]
+ return []
+
+ def get_debug_filename(self):
+ """
+ The name of the debuginfo file that will be created by the compiler
+
+ Returns None if the build won't create any debuginfo file
+ """
+ return self.debug_filename
+
+ def is_linkable_target(self):
+ return self.is_linkwithable
+
+class StaticLibrary(BuildTarget):
+ known_kwargs = known_stlib_kwargs
+
+ def __init__(self, name, subdir, subproject, for_machine: MachineChoice, sources, objects, environment, kwargs):
+ self.typename = 'static library'
+ super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs)
+ if 'cs' in self.compilers:
+ raise InvalidArguments('Static libraries not supported for C#.')
+ if 'rust' in self.compilers:
+ # If no crate type is specified, or it's the generic lib type, use rlib
+ if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'lib':
+ mlog.debug('Defaulting Rust static library target crate type to rlib')
+ self.rust_crate_type = 'rlib'
+ # Don't let configuration proceed with a non-static crate type
+ elif self.rust_crate_type not in ['rlib', 'staticlib']:
+ raise InvalidArguments(f'Crate type "{self.rust_crate_type}" invalid for static libraries; must be "rlib" or "staticlib"')
+ # By default a static library is named libfoo.a even on Windows because
+ # MSVC does not have a consistent convention for what static libraries
+ # are called. The MSVC CRT uses libfoo.lib syntax but nothing else uses
+ # it and GCC only looks for static libraries called foo.lib and
+ # libfoo.a. However, we cannot use foo.lib because that's the same as
+ # the import library. Using libfoo.a is ok because people using MSVC
+ # always pass the library filename while linking anyway.
+ if not hasattr(self, 'prefix'):
+ self.prefix = 'lib'
+ if not hasattr(self, 'suffix'):
+ if 'rust' in self.compilers:
+ if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'rlib':
+ # default Rust static library suffix
+ self.suffix = 'rlib'
+ elif self.rust_crate_type == 'staticlib':
+ self.suffix = 'a'
+ else:
+ self.suffix = 'a'
+ self.filename = self.prefix + self.name + '.' + self.suffix
+ self.outputs = [self.filename]
+ self.prelink = kwargs.get('prelink', False)
+ if not isinstance(self.prelink, bool):
+ raise InvalidArguments('Prelink keyword argument must be a boolean.')
+
+ def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]:
+ return {}
+
+ def get_default_install_dir(self, environment):
+ return environment.get_static_lib_dir()
+
+ def type_suffix(self):
+ return "@sta"
+
+ def process_kwargs(self, kwargs, environment):
+ super().process_kwargs(kwargs, environment)
+ if 'rust_crate_type' in kwargs:
+ rust_crate_type = kwargs['rust_crate_type']
+ if isinstance(rust_crate_type, str):
+ self.rust_crate_type = rust_crate_type
+ else:
+ raise InvalidArguments(f'Invalid rust_crate_type "{rust_crate_type}": must be a string.')
+
+ def is_linkable_target(self):
+ return True
+
+class SharedLibrary(BuildTarget):
+ known_kwargs = known_shlib_kwargs
+
+ def __init__(self, name, subdir, subproject, for_machine: MachineChoice, sources, objects, environment, kwargs):
+ self.typename = 'shared library'
+ self.soversion = None
+ self.ltversion = None
+ # Max length 2, first element is compatibility_version, second is current_version
+ self.darwin_versions = []
+ self.vs_module_defs = None
+ # The import library this target will generate
+ self.import_filename = None
+ # The import library that Visual Studio would generate (and accept)
+ self.vs_import_filename = None
+ # The import library that GCC would generate (and prefer)
+ self.gcc_import_filename = None
+ # The debugging information file this target will generate
+ self.debug_filename = None
+ # Used by the pkgconfig module
+ self.shared_library_only = False
+ super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs)
+ if 'rust' in self.compilers:
+ # If no crate type is specified, or it's the generic lib type, use dylib
+ if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'lib':
+ mlog.debug('Defaulting Rust dynamic library target crate type to "dylib"')
+ self.rust_crate_type = 'dylib'
+ # Don't let configuration proceed with a non-dynamic crate type
+ elif self.rust_crate_type not in ['dylib', 'cdylib']:
+ raise InvalidArguments(f'Crate type "{self.rust_crate_type}" invalid for dynamic libraries; must be "dylib" or "cdylib"')
+ if not hasattr(self, 'prefix'):
+ self.prefix = None
+ if not hasattr(self, 'suffix'):
+ self.suffix = None
+ self.basic_filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
+ self.determine_filenames(environment)
+
+ def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]:
+ result: T.Dict[str, str] = {}
+ mappings = self.get_transitive_link_deps_mapping(prefix, environment)
+ old = get_target_macos_dylib_install_name(self)
+ if old not in mappings:
+ fname = self.get_filename()
+ outdirs, _ = self.get_install_dir(self.environment)
+ new = os.path.join(prefix, outdirs[0], fname)
+ result.update({old: new})
+ mappings.update(result)
+ return mappings
+
+ def get_default_install_dir(self, environment):
+ return environment.get_shared_lib_dir()
+
+ def determine_filenames(self, env):
+ """
+ See https://github.com/mesonbuild/meson/pull/417 for details.
+
+ First we determine the filename template (self.filename_tpl), then we
+ set the output filename (self.filename).
+
+ The template is needed while creating aliases (self.get_aliases),
+ which are needed while generating .so shared libraries for Linux.
+
+ Besides this, there's also the import library name, which is only used
+ on Windows since on that platform the linker uses a separate library
+ called the "import library" during linking instead of the shared
+ library (DLL). The toolchain will output an import library in one of
+ two formats: GCC or Visual Studio.
+
+ When we're building with Visual Studio, the import library that will be
+ generated by the toolchain is self.vs_import_filename, and with
+ MinGW/GCC, it's self.gcc_import_filename. self.import_filename will
+ always contain the import library name this target will generate.
+ """
+ prefix = ''
+ suffix = ''
+ create_debug_file = False
+ self.filename_tpl = self.basic_filename_tpl
+ # NOTE: manual prefix/suffix override is currently only tested for C/C++
+ # C# and Mono
+ if 'cs' in self.compilers:
+ prefix = ''
+ suffix = 'dll'
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
+ create_debug_file = True
+ # C, C++, Swift, Vala
+ # Only Windows uses a separate import library for linking
+ # For all other targets/platforms import_filename stays None
+ elif env.machines[self.for_machine].is_windows():
+ suffix = 'dll'
+ self.vs_import_filename = '{}{}.lib'.format(self.prefix if self.prefix is not None else '', self.name)
+ self.gcc_import_filename = '{}{}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name)
+ if self.uses_rust():
+ # Shared library is of the form foo.dll
+ prefix = ''
+ # Import library is called foo.dll.lib
+ self.import_filename = f'{self.name}.dll.lib'
+ create_debug_file = True
+ elif self.get_using_msvc():
+ # Shared library is of the form foo.dll
+ prefix = ''
+ # Import library is called foo.lib
+ self.import_filename = self.vs_import_filename
+ create_debug_file = True
+ # Assume GCC-compatible naming
+ else:
+ # Shared library is of the form libfoo.dll
+ prefix = 'lib'
+ # Import library is called libfoo.dll.a
+ self.import_filename = self.gcc_import_filename
+ # Shared library has the soversion if it is defined
+ if self.soversion:
+ self.filename_tpl = '{0.prefix}{0.name}-{0.soversion}.{0.suffix}'
+ else:
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
+ elif env.machines[self.for_machine].is_cygwin():
+ suffix = 'dll'
+ self.gcc_import_filename = '{}{}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name)
+ # Shared library is of the form cygfoo.dll
+ # (ld --dll-search-prefix=cyg is the default)
+ prefix = 'cyg'
+ # Import library is called libfoo.dll.a
+ self.import_filename = self.gcc_import_filename
+ if self.soversion:
+ self.filename_tpl = '{0.prefix}{0.name}-{0.soversion}.{0.suffix}'
+ else:
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
+ elif env.machines[self.for_machine].is_darwin():
+ prefix = 'lib'
+ suffix = 'dylib'
+ # On macOS, the filename can only contain the major version
+ if self.soversion:
+ # libfoo.X.dylib
+ self.filename_tpl = '{0.prefix}{0.name}.{0.soversion}.{0.suffix}'
+ else:
+ # libfoo.dylib
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
+ elif env.machines[self.for_machine].is_android():
+ prefix = 'lib'
+ suffix = 'so'
+ # Android doesn't support shared_library versioning
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
+ else:
+ prefix = 'lib'
+ suffix = 'so'
+ if self.ltversion:
+ # libfoo.so.X[.Y[.Z]] (.Y and .Z are optional)
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}.{0.ltversion}'
+ elif self.soversion:
+ # libfoo.so.X
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}.{0.soversion}'
+ else:
+ # No versioning, libfoo.so
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
+ if self.prefix is None:
+ self.prefix = prefix
+ if self.suffix is None:
+ self.suffix = suffix
+ self.filename = self.filename_tpl.format(self)
+ self.outputs = [self.filename]
+ if create_debug_file:
+ self.debug_filename = os.path.splitext(self.filename)[0] + '.pdb'
+
+ @staticmethod
+ def _validate_darwin_versions(darwin_versions):
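+ # Normalization sketch: 2 -> ['2', '2']; '1.2' -> ['1.2', '1.2'];
+ # a one-element list is doubled; a two-element list is kept as
+ # [compatibility_version, current_version].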
+ try:
+ if isinstance(darwin_versions, int):
+ darwin_versions = str(darwin_versions)
+ if isinstance(darwin_versions, str):
+ darwin_versions = 2 * [darwin_versions]
+ if not isinstance(darwin_versions, list):
+ raise InvalidArguments('Shared library darwin_versions: must be a string, integer, '
+ f'or a list, not {darwin_versions!r}')
+ if len(darwin_versions) > 2:
+ raise InvalidArguments('Shared library darwin_versions: list must contain 2 or fewer elements')
+ if len(darwin_versions) == 1:
+ darwin_versions = 2 * darwin_versions
+ for i, v in enumerate(darwin_versions[:]):
+ if isinstance(v, int):
+ v = str(v)
+ if not isinstance(v, str):
+ raise InvalidArguments('Shared library darwin_versions: list elements '
+ f'must be strings or integers, not {v!r}')
+ if not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', v):
+ raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z where '
+ 'X, Y, Z are numbers, and Y and Z are optional')
+ parts = v.split('.')
+ if len(parts) in (1, 2, 3) and int(parts[0]) > 65535:
+ raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
+ 'where X is [0, 65535] and Y, Z are optional')
+ if len(parts) in (2, 3) and int(parts[1]) > 255:
+ raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
+ 'where Y is [0, 255] and Y, Z are optional')
+ if len(parts) == 3 and int(parts[2]) > 255:
+ raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
+ 'where Z is [0, 255] and Y, Z are optional')
+ darwin_versions[i] = v
+ except ValueError:
+ raise InvalidArguments('Shared library darwin_versions: value is invalid')
+ return darwin_versions
+
+ def process_kwargs(self, kwargs, environment):
+ super().process_kwargs(kwargs, environment)
+
+ if not self.environment.machines[self.for_machine].is_android():
+ supports_versioning = True
+ else:
+ supports_versioning = False
+
+ if supports_versioning:
+ # Shared library version
+ if 'version' in kwargs:
+ self.ltversion = kwargs['version']
+ if not isinstance(self.ltversion, str):
+ raise InvalidArguments('Shared library version needs to be a string, not ' + type(self.ltversion).__name__)
+ if not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', self.ltversion):
+ raise InvalidArguments(f'Invalid Shared library version "{self.ltversion}". Must be of the form X.Y.Z where all three are numbers. Y and Z are optional.')
+ # Try to extract/deduce the soversion
+ if 'soversion' in kwargs:
+ self.soversion = kwargs['soversion']
+ if isinstance(self.soversion, int):
+ self.soversion = str(self.soversion)
+ if not isinstance(self.soversion, str):
+ raise InvalidArguments('Shared library soversion is not a string or integer.')
+ elif self.ltversion:
+ # library version is defined, get the soversion from that
+ # We replicate what Autotools does here and take the first
+ # number of the version by default.
+ self.soversion = self.ltversion.split('.')[0]
+ # macOS, iOS and tvOS dylib compatibility_version and current_version
+ if 'darwin_versions' in kwargs:
+ self.darwin_versions = self._validate_darwin_versions(kwargs['darwin_versions'])
+ elif self.soversion:
+ # If unspecified, pick the soversion
+ self.darwin_versions = 2 * [self.soversion]
+
+ # Visual Studio module-definitions file
+ if 'vs_module_defs' in kwargs:
+ path = kwargs['vs_module_defs']
+ if isinstance(path, str):
+ if os.path.isabs(path):
+ self.vs_module_defs = File.from_absolute_file(path)
+ else:
+ self.vs_module_defs = File.from_source_file(environment.source_dir, self.subdir, path)
+ self.link_depends.append(self.vs_module_defs)
+ elif isinstance(path, File):
+ # When passing a generated file.
+ self.vs_module_defs = path
+ self.link_depends.append(path)
+ elif hasattr(path, 'get_filename'):
+ # When passing output of a Custom Target
+ path = File.from_built_file(path.subdir, path.get_filename())
+ self.vs_module_defs = path
+ self.link_depends.append(path)
+ else:
+ raise InvalidArguments(
+ 'Shared library vs_module_defs must be either a string, '
+ 'a file object or a Custom Target')
+ if 'rust_crate_type' in kwargs:
+ rust_crate_type = kwargs['rust_crate_type']
+ if isinstance(rust_crate_type, str):
+ self.rust_crate_type = rust_crate_type
+ else:
+ raise InvalidArguments(f'Invalid rust_crate_type "{rust_crate_type}": must be a string.')
+
+ def get_import_filename(self):
+ """
+ The name of the import library that will be generated by the compiler
+
+ Returns None if there is no import library required for this platform
+ """
+ return self.import_filename
+
+ def get_debug_filename(self):
+ """
+ The name of the debuginfo file that will be created by the compiler
+
+ Returns None if the build won't create any debuginfo file
+ """
+ return self.debug_filename
+
+ def get_import_filenameslist(self):
+ if self.import_filename:
+ return [self.vs_import_filename, self.gcc_import_filename]
+ return []
+
+ def get_all_link_deps(self):
+ return [self] + self.get_transitive_link_deps()
+
+ def get_aliases(self) -> T.Dict[str, str]:
+ """
+ If the versioned library name is libfoo.so.0.100.0, aliases are:
+ * libfoo.so.0 (soversion) -> libfoo.so.0.100.0
+ * libfoo.so (unversioned; for linking) -> libfoo.so.0
+ Same for dylib:
+ * libfoo.dylib (unversioned; for linking) -> libfoo.0.dylib
+ """
+ aliases: T.Dict[str, str] = {}
+ # Aliases are only useful with .so and .dylib libraries. Also if
+ # there's no self.soversion (no versioning), we don't need aliases.
+ if self.suffix not in ('so', 'dylib') or not self.soversion:
+ return aliases
+ # With .so libraries, the minor and micro versions are also in the
+ # filename. If ltversion != soversion we create an soversion alias:
+ # libfoo.so.0 -> libfoo.so.0.100.0
+ # Where libfoo.so.0.100.0 is the actual library
+ if self.suffix == 'so' and self.ltversion and self.ltversion != self.soversion:
+ alias_tpl = self.filename_tpl.replace('ltversion', 'soversion')
+ ltversion_filename = alias_tpl.format(self)
+ aliases[ltversion_filename] = self.filename
+ # libfoo.so.0/libfoo.0.dylib is the actual library
+ else:
+ ltversion_filename = self.filename
+ # Unversioned alias:
+ # libfoo.so -> libfoo.so.0
+ # libfoo.dylib -> libfoo.0.dylib
+ aliases[self.basic_filename_tpl.format(self)] = ltversion_filename
+ return aliases
+
+ def type_suffix(self):
+ return "@sha"
+
+ def is_linkable_target(self):
+ return True
+
+# A shared library that is meant to be used with dlopen rather than linking
+# into something else.
+class SharedModule(SharedLibrary):
+ known_kwargs = known_shmod_kwargs
+
+ def __init__(self, name, subdir, subproject, for_machine: MachineChoice, sources, objects, environment, kwargs):
+ if 'version' in kwargs:
+ raise MesonException('Shared modules must not specify the version kwarg.')
+ if 'soversion' in kwargs:
+ raise MesonException('Shared modules must not specify the soversion kwarg.')
+ super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs)
+ self.typename = 'shared module'
+
+ def get_default_install_dir(self, environment):
+ return environment.get_shared_module_dir()
+
+class BothLibraries(SecondLevelHolder):
+ def __init__(self, shared: SharedLibrary, static: StaticLibrary) -> None:
+ self._preferred_library = 'shared'
+ self.shared = shared
+ self.static = static
+ self.subproject = self.shared.subproject
+
+ def __repr__(self) -> str:
+ return f'<BothLibraries: static={repr(self.static)}; shared={repr(self.shared)}>'
+
+ def get_default_object(self) -> BuildTarget:
+ if self._preferred_library == 'shared':
+ return self.shared
+ elif self._preferred_library == 'static':
+ return self.static
+ raise MesonBugException(f'self._preferred_library == "{self._preferred_library}" is neither "shared" nor "static".')
+
+class CommandBase:
+ def flatten_command(self, cmd):
+ cmd = listify(cmd)
+ final_cmd = []
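+ # Sketch (hypothetical command): [prog, infile, ['-o', '@OUTPUT@']] is
+ # flattened element by element below; File arguments and found external
+ # programs are also recorded as dependencies of the target.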
+ for c in cmd:
+ if isinstance(c, str):
+ final_cmd.append(c)
+ elif isinstance(c, File):
+ self.depend_files.append(c)
+ final_cmd.append(c)
+ elif isinstance(c, programs.ExternalProgram):
+ if not c.found():
+ raise InvalidArguments('Tried to use a not-found external program in "command"')
+ path = c.get_path()
+ if os.path.isabs(path):
+ # Can only add a dependency on an external program which we
+ # know the absolute path of
+ self.depend_files.append(File.from_absolute_file(path))
+ final_cmd += c.get_command()
+ elif isinstance(c, (BuildTarget, CustomTarget)):
+ self.dependencies.append(c)
+ final_cmd.append(c)
+ elif isinstance(c, list):
+ final_cmd += self.flatten_command(c)
+ else:
+ raise InvalidArguments(f'Argument {c!r} in "command" is invalid')
+ return final_cmd
+
+class CustomTarget(Target, CommandBase):
+ known_kwargs = {
+ 'input',
+ 'output',
+ 'command',
+ 'capture',
+ 'feed',
+ 'install',
+ 'install_dir',
+ 'install_mode',
+ 'build_always',
+ 'build_always_stale',
+ 'depends',
+ 'depend_files',
+ 'depfile',
+ 'build_by_default',
+ 'override_options',
+ 'console',
+ 'env',
+ }
+
+ def __init__(self, name: str, subdir: str, subproject: str, kwargs: T.Dict[str, T.Any],
+ absolute_paths: bool = False, backend: T.Optional['Backend'] = None):
+ self.typename = 'custom'
+ # TODO expose keyword arg to make MachineChoice.HOST configurable
+ super().__init__(name, subdir, subproject, False, MachineChoice.HOST)
+ self.dependencies: T.List[T.Union[CustomTarget, BuildTarget]] = []
+ self.extra_depends = []
+ self.depend_files = [] # Files that this target depends on but are not on the command line.
+ self.depfile = None
+ self.process_kwargs(kwargs, backend)
+ # Whether to use absolute paths for all files on the commandline
+ self.absolute_paths = absolute_paths
+ unknowns = []
+ for k in kwargs:
+ if k not in CustomTarget.known_kwargs:
+ unknowns.append(k)
+ if unknowns:
+ mlog.warning('Unknown keyword arguments in target {}: {}'.format(self.name, ', '.join(unknowns)))
+
+ def get_default_install_dir(self, environment):
+ return None
+
+ def __repr__(self):
+ repr_str = "<{0} {1}: {2}>"
+ return repr_str.format(self.__class__.__name__, self.get_id(), self.command)
+
+ def get_target_dependencies(self):
+ deps = self.dependencies[:]
+ deps += self.extra_depends
+ for c in self.sources:
+ if isinstance(c, (BuildTarget, CustomTarget)):
+ deps.append(c)
+ return deps
+
+ def get_transitive_build_target_deps(self):
+        '''
+        Recursively fetch the build targets that this custom target depends on,
+        whether through `command:`, `depends:`, or `sources:`. The recursion is
+        only performed on custom targets.
+        This is useful for setting PATH on Windows for finding required DLLs.
+        For example, if you have a python script that loads a C module that
+        links to other DLLs in your project.
+        '''
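+        # Illustrative shape of the traversal (target names hypothetical):
+        #   this_ct -> depends: [gen_ct] -> command: [helper_exe (BuildTarget)]
+        # => helper_exe ends up in the result; the recursion only descends
+        #    through CustomTargets, never through BuildTargets.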
+ bdeps = set()
+ deps = self.get_target_dependencies()
+ for d in deps:
+ if isinstance(d, BuildTarget):
+ bdeps.add(d)
+ elif isinstance(d, CustomTarget):
+ bdeps.update(d.get_transitive_build_target_deps())
+ return bdeps
+
+ def process_kwargs(self, kwargs, backend):
+ self.process_kwargs_base(kwargs)
+ self.sources = extract_as_list(kwargs, 'input')
+ if 'output' not in kwargs:
+ raise InvalidArguments('Missing keyword argument "output".')
+ self.outputs = listify(kwargs['output'])
+ # This will substitute values from the input into output and return it.
+ inputs = get_sources_string_names(self.sources, backend)
+ values = get_filenames_templates_dict(inputs, [])
+ for i in self.outputs:
+            if not isinstance(i, str):
+ raise InvalidArguments('Output argument not a string.')
+ if i == '':
+ raise InvalidArguments('Output must not be empty.')
+ if i.strip() == '':
+ raise InvalidArguments('Output must not consist only of whitespace.')
+ if has_path_sep(i):
+ raise InvalidArguments(f'Output {i!r} must not contain a path segment.')
+ if '@INPUT@' in i or '@INPUT0@' in i:
+ m = 'Output cannot contain @INPUT@ or @INPUT0@, did you ' \
+ 'mean @PLAINNAME@ or @BASENAME@?'
+ raise InvalidArguments(m)
+ # We already check this during substitution, but the error message
+ # will be unclear/confusing, so check it here.
+ if len(inputs) != 1 and ('@PLAINNAME@' in i or '@BASENAME@' in i):
+ m = "Output cannot contain @PLAINNAME@ or @BASENAME@ when " \
+ "there is more than one input (we can't know which to use)"
+ raise InvalidArguments(m)
+ self.outputs = substitute_values(self.outputs, values)
+ self.capture = kwargs.get('capture', False)
+ if self.capture and len(self.outputs) != 1:
+ raise InvalidArguments('Capturing can only output to a single file.')
+ self.feed = kwargs.get('feed', False)
+ if self.feed and len(self.sources) != 1:
+ raise InvalidArguments('Feeding can only input from a single file.')
+ self.console = kwargs.get('console', False)
+ if not isinstance(self.console, bool):
+ raise InvalidArguments('"console" kwarg only accepts booleans')
+ if self.capture and self.console:
+ raise InvalidArguments("Can't both capture output and output to console")
+ if 'command' not in kwargs:
+ raise InvalidArguments('Missing keyword argument "command".')
+ if 'depfile' in kwargs:
+ depfile = kwargs['depfile']
+ if not isinstance(depfile, str):
+ raise InvalidArguments('Depfile must be a string.')
+ if os.path.basename(depfile) != depfile:
+ raise InvalidArguments('Depfile must be a plain filename without a subdirectory.')
+ self.depfile = depfile
+ self.command = self.flatten_command(kwargs['command'])
+ for c in self.command:
+ if self.capture and isinstance(c, str) and '@OUTPUT@' in c:
+ raise InvalidArguments('@OUTPUT@ is not allowed when capturing output.')
+ if self.feed and isinstance(c, str) and '@INPUT@' in c:
+ raise InvalidArguments('@INPUT@ is not allowed when feeding input.')
+ if 'install' in kwargs:
+ self.install = kwargs['install']
+ if not isinstance(self.install, bool):
+ raise InvalidArguments('"install" must be boolean.')
+ if self.install:
+ if 'install_dir' not in kwargs:
+ raise InvalidArguments('"install_dir" must be specified '
+ 'when installing a target')
+
+ if isinstance(kwargs['install_dir'], list):
+ FeatureNew.single_use('multiple install_dir for custom_target', '0.40.0', self.subproject)
+ # If an item in this list is False, the output corresponding to
+ # the list index of that item will not be installed
+ self.install_dir = typeslistify(kwargs['install_dir'], (str, bool))
+ self.install_mode = kwargs.get('install_mode', None)
+ else:
+ self.install = False
+ self.install_dir = [None]
+ self.install_mode = None
+ if 'build_always' in kwargs and 'build_always_stale' in kwargs:
+ raise InvalidArguments('build_always and build_always_stale are mutually exclusive. Combine build_by_default and build_always_stale.')
+ elif 'build_always' in kwargs:
+ if 'build_by_default' not in kwargs:
+ self.build_by_default = kwargs['build_always']
+ self.build_always_stale = kwargs['build_always']
+ elif 'build_always_stale' in kwargs:
+ self.build_always_stale = kwargs['build_always_stale']
+ if not isinstance(self.build_always_stale, bool):
+ raise InvalidArguments('Argument build_always_stale must be a boolean.')
+ extra_deps, depend_files = [extract_as_list(kwargs, c, pop=False) for c in ['depends', 'depend_files']]
+ for ed in extra_deps:
+ if not isinstance(ed, (CustomTarget, BuildTarget)):
+                raise InvalidArguments('Can only depend on toplevel targets: custom_target or build_target '
+                                       f'(executable or a library); got: {type(ed)}({ed})')
+ self.extra_depends.append(ed)
+ for i in depend_files:
+ if isinstance(i, (File, str)):
+ self.depend_files.append(i)
+ else:
+ mlog.debug(i)
+ raise InvalidArguments(f'Unknown type {type(i).__name__!r} in depend_files.')
+ self.env = kwargs.get('env')
+
+ def get_dependencies(self):
+ return self.dependencies
+
+ def should_install(self) -> bool:
+ return self.install
+
+ def get_custom_install_dir(self):
+ return self.install_dir
+
+ def get_custom_install_mode(self):
+ return self.install_mode
+
+ def get_outputs(self) -> T.List[str]:
+ return self.outputs
+
+ def get_filename(self):
+ return self.outputs[0]
+
+ def get_sources(self):
+ return self.sources
+
+ def get_generated_lists(self):
+ genlists = []
+ for c in self.sources:
+ if isinstance(c, GeneratedList):
+ genlists.append(c)
+ return genlists
+
+ def get_generated_sources(self):
+ return self.get_generated_lists()
+
+ def get_dep_outname(self, infilenames):
+ if self.depfile is None:
+            raise InvalidArguments('Tried to get depfile name for a custom_target that does not have a depfile defined.')
+ if infilenames:
+ plainname = os.path.basename(infilenames[0])
+ basename = os.path.splitext(plainname)[0]
+ return self.depfile.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname)
+ else:
+ if '@BASENAME@' in self.depfile or '@PLAINNAME@' in self.depfile:
+ raise InvalidArguments('Substitution in depfile for custom_target that does not have an input file.')
+ return self.depfile
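+
+    # Illustrative: with depfile '@BASENAME@.d' and a single input
+    # 'src/foo.c.in', the result is 'foo.c.d' (@PLAINNAME@ is 'foo.c.in';
+    # @BASENAME@ strips only the final suffix).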
+
+    def is_linkable_target(self):
+        if len(self.outputs) != 1:
+            return False
+        suf = os.path.splitext(self.outputs[0])[-1]
+        return suf in {'.a', '.dll', '.lib', '.so', '.dylib'}
+
+ def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]:
+ return {}
+
+ def get_link_dep_subdirs(self):
+ return OrderedSet()
+
+ def get_all_link_deps(self):
+ return []
+
+ def is_internal(self) -> bool:
+ if not self.should_install():
+ return True
+ for out in self.get_outputs():
+ # Can't check if this is a static library, so try to guess
+ if not out.endswith(('.a', '.lib')):
+ return False
+ return True
+
+ def extract_all_objects_recurse(self):
+ return self.get_outputs()
+
+ def type_suffix(self):
+ return "@cus"
+
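+    # Indexing yields per-output proxies, e.g. (illustrative):
+    #   ct = custom_target(..., output: ['a.h', 'a.c'])  # in meson.build
+    #   ct[0]  # -> CustomTargetIndex wrapping 'a.h'
+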
+ def __getitem__(self, index: int) -> 'CustomTargetIndex':
+ return CustomTargetIndex(self, self.outputs[index])
+
+ def __setitem__(self, index, value):
+ raise NotImplementedError
+
+ def __delitem__(self, index):
+ raise NotImplementedError
+
+ def __iter__(self):
+ for i in self.outputs:
+ yield CustomTargetIndex(self, i)
+
+class RunTarget(Target, CommandBase):
+ def __init__(self, name, command, dependencies, subdir, subproject, env=None):
+ self.typename = 'run'
+ # These don't produce output artifacts
+ super().__init__(name, subdir, subproject, False, MachineChoice.BUILD)
+ self.dependencies = dependencies
+ self.depend_files = []
+ self.command = self.flatten_command(command)
+ self.absolute_paths = False
+ self.env = env
+
+    def __repr__(self):
+        repr_str = "<{0} {1}: {2}>"
+        return repr_str.format(self.__class__.__name__, self.get_id(), self.command[0] if self.command else '')
+
+ def process_kwargs(self, kwargs):
+ return self.process_kwargs_base(kwargs)
+
+ def get_dependencies(self):
+ return self.dependencies
+
+ def get_generated_sources(self):
+ return []
+
+ def get_sources(self):
+ return []
+
+ def should_install(self) -> bool:
+ return False
+
+ def get_filename(self) -> str:
+ return self.name
+
+ def get_outputs(self) -> T.List[str]:
+ if isinstance(self.name, str):
+ return [self.name]
+ elif isinstance(self.name, list):
+ return self.name
+ else:
+ raise RuntimeError('RunTarget: self.name is neither a list nor a string. This is a bug')
+
+ def type_suffix(self):
+ return "@run"
+
+class AliasTarget(RunTarget):
+ def __init__(self, name, dependencies, subdir, subproject):
+ super().__init__(name, [], dependencies, subdir, subproject)
+
+class Jar(BuildTarget):
+ known_kwargs = known_jar_kwargs
+
+ def __init__(self, name, subdir, subproject, for_machine: MachineChoice, sources, objects, environment, kwargs):
+ self.typename = 'jar'
+ super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs)
+ for s in self.sources:
+ if not s.endswith('.java'):
+ raise InvalidArguments(f'Jar source {s} is not a java file.')
+ for t in self.link_targets:
+ if not isinstance(t, Jar):
+ raise InvalidArguments(f'Link target {t} is not a jar target.')
+ self.filename = self.name + '.jar'
+ self.outputs = [self.filename]
+ self.java_args = kwargs.get('java_args', [])
+
+ def get_main_class(self):
+ return self.main_class
+
+ def type_suffix(self):
+ return "@jar"
+
+ def get_java_args(self):
+ return self.java_args
+
+ def validate_install(self, environment):
+ # All jar targets are installable.
+ pass
+
+ def is_linkable_target(self):
+ return True
+
+    def get_classpath_args(self):
+        cp_paths = [os.path.join(l.get_subdir(), l.get_filename()) for l in self.link_targets]
+        cp_string = os.pathsep.join(cp_paths)
+        if cp_string:
+            return ['-cp', cp_string]
+        return []
+
+class CustomTargetIndex(HoldableObject):
+
+ """A special opaque object returned by indexing a CustomTarget. This object
+ exists in Meson, but acts as a proxy in the backends, making targets depend
+ on the CustomTarget it's derived from, but only adding one source file to
+ the sources.
+ """
+
+    def __init__(self, target: CustomTarget, output: str):
+ self.typename = 'custom'
+ self.target = target
+ self.output = output
+ self.for_machine = target.for_machine
+
+ def __repr__(self):
+ return '<CustomTargetIndex: {!r}[{}]>'.format(
+ self.target, self.target.get_outputs().index(self.output))
+
+ def get_outputs(self) -> T.List[str]:
+ return [self.output]
+
+ def get_subdir(self) -> str:
+ return self.target.get_subdir()
+
+ def get_filename(self):
+ return self.output
+
+ def get_id(self):
+ return self.target.get_id()
+
+ def get_all_link_deps(self):
+ return self.target.get_all_link_deps()
+
+ def get_link_deps_mapping(self, prefix: str, environment: environment.Environment) -> T.Mapping[str, str]:
+ return self.target.get_link_deps_mapping(prefix, environment)
+
+ def get_link_dep_subdirs(self):
+ return self.target.get_link_dep_subdirs()
+
+    def is_linkable_target(self):
+        suf = os.path.splitext(self.output)[-1]
+        return suf in {'.a', '.dll', '.lib', '.so'}
+
+ def should_install(self) -> bool:
+ return self.target.should_install()
+
+ def is_internal(self) -> bool:
+ return self.target.is_internal()
+
+ def extract_all_objects_recurse(self):
+ return self.target.extract_all_objects_recurse()
+
+ def get_custom_install_dir(self):
+ return self.target.get_custom_install_dir()
+
+class ConfigurationData(HoldableObject):
+ def __init__(self) -> None:
+ super().__init__()
+ self.values: T.Dict[
+ str,
+ T.Tuple[
+ T.Union[str, int, bool],
+ T.Optional[str]
+ ]
+ ] = {}
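+        # Illustrative shape: {'HAVE_FOO': (1, 'optional description'), ...}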
+
+ def __repr__(self):
+ return repr(self.values)
+
+ def __contains__(self, value: str) -> bool:
+ return value in self.values
+
+ def get(self, name: str) -> T.Tuple[T.Union[str, int, bool], T.Optional[str]]:
+ return self.values[name] # (val, desc)
+
+    def keys(self) -> T.KeysView[str]:
+ return self.values.keys()
+
+# A bit poorly named, but this represents plain data files to copy
+# during install.
+class Data(HoldableObject):
+ def __init__(self, sources: T.List[File], install_dir: str,
+ install_mode: 'FileMode', subproject: str,
+                 rename: T.Optional[T.List[str]] = None):
+ self.sources = sources
+ self.install_dir = install_dir
+ self.install_mode = install_mode
+ if rename is None:
+ self.rename = [os.path.basename(f.fname) for f in self.sources]
+ else:
+ self.rename = rename
+ self.subproject = subproject
+
+class TestSetup:
+ def __init__(self, exe_wrapper: T.Optional[T.List[str]], gdb: bool,
+ timeout_multiplier: int, env: EnvironmentVariables,
+ exclude_suites: T.List[str]):
+ self.exe_wrapper = exe_wrapper
+ self.gdb = gdb
+ self.timeout_multiplier = timeout_multiplier
+ self.env = env
+ self.exclude_suites = exclude_suites
+
+def get_sources_string_names(sources, backend):
+ '''
+ For the specified list of @sources which can be strings, Files, or targets,
+ get all the output basenames.
+ '''
+ names = []
+ for s in sources:
+ if isinstance(s, str):
+ names.append(s)
+ elif isinstance(s, (BuildTarget, CustomTarget, CustomTargetIndex, GeneratedList)):
+ names += s.get_outputs()
+ elif isinstance(s, ExtractedObjects):
+ names += s.get_outputs(backend)
+ elif isinstance(s, File):
+ names.append(s.fname)
+ else:
+ raise AssertionError(f'Unknown source type: {s!r}')
+ return names
+
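+# Illustrative call (names hypothetical):
+#   get_sources_string_names(['a.c', some_build_target], backend)
+#   -> ['a.c'] + some_build_target.get_outputs()
+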
+def load(build_dir: str) -> Build:
+ filename = os.path.join(build_dir, 'meson-private', 'build.dat')
+ load_fail_msg = f'Build data file {filename!r} is corrupted. Try with a fresh build tree.'
+    nonexisting_fail_msg = f'No such build data file as {filename!r}.'
+ try:
+ with open(filename, 'rb') as f:
+ obj = pickle.load(f)
+ except FileNotFoundError:
+ raise MesonException(nonexisting_fail_msg)
+ except (pickle.UnpicklingError, EOFError):
+ raise MesonException(load_fail_msg)
+ except AttributeError:
+        raise MesonException(
+            f"Build data file {filename!r} references functions or classes that don't "
+            "exist. This probably means that it was generated with an old "
+            "version of meson. Try running "
+            f"`meson {build_dir} --wipe` from the source directory.")
+ if not isinstance(obj, Build):
+ raise MesonException(load_fail_msg)
+ return obj
+
+def save(obj: Build, filename: str) -> None:
+ with open(filename, 'wb') as f:
+ pickle.dump(obj, f)
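+
+# Illustrative round-trip, assuming an already-configured build tree:
+#   b = load('builddir')                       # -> Build instance
+#   save(b, os.path.join('builddir', 'meson-private', 'build.dat'))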
diff --git a/meson/mesonbuild/cmake/__init__.py b/meson/mesonbuild/cmake/__init__.py
new file mode 100644
index 000000000..d39bf2424
--- /dev/null
+++ b/meson/mesonbuild/cmake/__init__.py
@@ -0,0 +1,46 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module exports the public API of Meson's CMake integration.
+
+__all__ = [
+ 'CMakeClient',
+ 'CMakeExecutor',
+ 'CMakeExecScope',
+ 'CMakeException',
+ 'CMakeFileAPI',
+ 'CMakeInterpreter',
+ 'CMakeTarget',
+ 'CMakeToolchain',
+ 'CMakeTraceLine',
+ 'CMakeTraceParser',
+ 'SingleTargetOptions',
+ 'TargetOptions',
+ 'parse_generator_expressions',
+ 'language_map',
+ 'backend_generator_map',
+ 'cmake_get_generator_args',
+ 'cmake_defines_to_args',
+ 'check_cmake_args',
+]
+
+from .common import CMakeException, SingleTargetOptions, TargetOptions, cmake_defines_to_args, language_map, backend_generator_map, cmake_get_generator_args, check_cmake_args
+from .client import CMakeClient
+from .executor import CMakeExecutor
+from .fileapi import CMakeFileAPI
+from .generator import parse_generator_expressions
+from .interpreter import CMakeInterpreter
+from .toolchain import CMakeToolchain, CMakeExecScope
+from .traceparser import CMakeTarget, CMakeTraceLine, CMakeTraceParser
diff --git a/meson/mesonbuild/cmake/client.py b/meson/mesonbuild/cmake/client.py
new file mode 100644
index 000000000..bcbb52ef9
--- /dev/null
+++ b/meson/mesonbuild/cmake/client.py
@@ -0,0 +1,373 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the client for the CMake server protocol.
+
+from .common import CMakeException, CMakeConfiguration, CMakeBuildFile
+from .. import mlog
+from contextlib import contextmanager
+from subprocess import Popen, PIPE, TimeoutExpired
+from pathlib import Path
+import typing as T
+import json
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+ from .executor import CMakeExecutor
+
+CMAKE_SERVER_BEGIN_STR = '[== "CMake Server" ==['
+CMAKE_SERVER_END_STR = ']== "CMake Server" ==]'
+
+CMAKE_MESSAGE_TYPES = {
+ 'error': ['cookie', 'errorMessage'],
+ 'hello': ['supportedProtocolVersions'],
+ 'message': ['cookie', 'message'],
+ 'progress': ['cookie'],
+ 'reply': ['cookie', 'inReplyTo'],
+ 'signal': ['cookie', 'name'],
+} # type: T.Dict[str, T.List[str]]
+
+CMAKE_REPLY_TYPES = {
+ 'handshake': [],
+ 'configure': [],
+ 'compute': [],
+ 'cmakeInputs': ['buildFiles', 'cmakeRootDirectory', 'sourceDirectory'],
+ 'codemodel': ['configurations']
+} # type: T.Dict[str, T.List[str]]
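+
+# Example of the server wire framing (illustrative payload):
+#   [== "CMake Server" ==[
+#   {"type": "hello", "supportedProtocolVersions": [{"major": 1}]}
+#   ]== "CMake Server" ==]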
+
+# Base CMake server message classes
+
+class MessageBase:
+ def __init__(self, msg_type: str, cookie: str) -> None:
+ self.type = msg_type
+ self.cookie = cookie
+
+ def to_dict(self) -> T.Dict[str, T.Union[str, T.List[str], T.Dict[str, int]]]:
+ return {'type': self.type, 'cookie': self.cookie}
+
+ def log(self) -> None:
+ mlog.warning('CMake server message of type', mlog.bold(type(self).__name__), 'has no log function')
+
+class RequestBase(MessageBase):
+ cookie_counter = 0
+
+ def __init__(self, msg_type: str) -> None:
+ super().__init__(msg_type, self.gen_cookie())
+
+ @staticmethod
+ def gen_cookie() -> str:
+ RequestBase.cookie_counter += 1
+ return f'meson_{RequestBase.cookie_counter}'
+
+class ReplyBase(MessageBase):
+ def __init__(self, cookie: str, in_reply_to: str) -> None:
+ super().__init__('reply', cookie)
+ self.in_reply_to = in_reply_to
+
+class SignalBase(MessageBase):
+ def __init__(self, cookie: str, signal_name: str) -> None:
+ super().__init__('signal', cookie)
+ self.signal_name = signal_name
+
+ def log(self) -> None:
+ mlog.log(mlog.bold('CMake signal:'), mlog.yellow(self.signal_name))
+
+# Special Message classes
+
+class Error(MessageBase):
+ def __init__(self, cookie: str, message: str) -> None:
+ super().__init__('error', cookie)
+ self.message = message
+
+ def log(self) -> None:
+ mlog.error(mlog.bold('CMake server error:'), mlog.red(self.message))
+
+class Message(MessageBase):
+ def __init__(self, cookie: str, message: str) -> None:
+ super().__init__('message', cookie)
+ self.message = message
+
+ def log(self) -> None:
+ #mlog.log(mlog.bold('CMake:'), self.message)
+ pass
+
+class Progress(MessageBase):
+ def __init__(self, cookie: str) -> None:
+ super().__init__('progress', cookie)
+
+ def log(self) -> None:
+ pass
+
+class MessageHello(MessageBase):
+ def __init__(self, supported_protocol_versions: T.List[T.Dict[str, int]]) -> None:
+ super().__init__('hello', '')
+ self.supported_protocol_versions = supported_protocol_versions
+
+ def supports(self, major: int, minor: T.Optional[int] = None) -> bool:
+ for i in self.supported_protocol_versions:
+ assert 'major' in i
+ assert 'minor' in i
+ if major == i['major']:
+ if minor is None or minor == i['minor']:
+ return True
+ return False
+
+# Request classes
+
+class RequestHandShake(RequestBase):
+ def __init__(self, src_dir: Path, build_dir: Path, generator: str, vers_major: int, vers_minor: T.Optional[int] = None) -> None:
+ super().__init__('handshake')
+ self.src_dir = src_dir
+ self.build_dir = build_dir
+ self.generator = generator
+ self.vers_major = vers_major
+ self.vers_minor = vers_minor
+
+ def to_dict(self) -> T.Dict[str, T.Union[str, T.List[str], T.Dict[str, int]]]:
+ vers = {'major': self.vers_major}
+ if self.vers_minor is not None:
+ vers['minor'] = self.vers_minor
+
+ # Old CMake versions (3.7) want '/' even on Windows
+ self.src_dir = self.src_dir.resolve()
+ self.build_dir = self.build_dir.resolve()
+
+ return {
+ **super().to_dict(),
+ 'sourceDirectory': self.src_dir.as_posix(),
+ 'buildDirectory': self.build_dir.as_posix(),
+ 'generator': self.generator,
+ 'protocolVersion': vers
+ }
+
+class RequestConfigure(RequestBase):
+ def __init__(self, args: T.Optional[T.List[str]] = None):
+ super().__init__('configure')
+ self.args = args
+
+ def to_dict(self) -> T.Dict[str, T.Union[str, T.List[str], T.Dict[str, int]]]:
+ res = super().to_dict()
+ if self.args:
+ res['cacheArguments'] = self.args
+ return res
+
+class RequestCompute(RequestBase):
+ def __init__(self) -> None:
+ super().__init__('compute')
+
+class RequestCMakeInputs(RequestBase):
+ def __init__(self) -> None:
+ super().__init__('cmakeInputs')
+
+class RequestCodeModel(RequestBase):
+ def __init__(self) -> None:
+ super().__init__('codemodel')
+
+# Reply classes
+
+class ReplyHandShake(ReplyBase):
+ def __init__(self, cookie: str) -> None:
+ super().__init__(cookie, 'handshake')
+
+class ReplyConfigure(ReplyBase):
+ def __init__(self, cookie: str) -> None:
+ super().__init__(cookie, 'configure')
+
+class ReplyCompute(ReplyBase):
+ def __init__(self, cookie: str) -> None:
+ super().__init__(cookie, 'compute')
+
+class ReplyCMakeInputs(ReplyBase):
+ def __init__(self, cookie: str, cmake_root: Path, src_dir: Path, build_files: T.List[CMakeBuildFile]) -> None:
+ super().__init__(cookie, 'cmakeInputs')
+ self.cmake_root = cmake_root
+ self.src_dir = src_dir
+ self.build_files = build_files
+
+ def log(self) -> None:
+ mlog.log('CMake root: ', mlog.bold(self.cmake_root.as_posix()))
+ mlog.log('Source dir: ', mlog.bold(self.src_dir.as_posix()))
+ mlog.log('Build files:', mlog.bold(str(len(self.build_files))))
+ with mlog.nested():
+ for i in self.build_files:
+ mlog.log(str(i))
+
+class ReplyCodeModel(ReplyBase):
+ def __init__(self, data: T.Dict[str, T.Any]) -> None:
+ super().__init__(data['cookie'], 'codemodel')
+ self.configs = []
+ for i in data['configurations']:
+ self.configs += [CMakeConfiguration(i)]
+
+ def log(self) -> None:
+        mlog.log('CMake code model:')
+ for idx, i in enumerate(self.configs):
+ mlog.log(f'Configuration {idx}:')
+ with mlog.nested():
+ i.log()
+
+# Main client class
+
+class CMakeClient:
+ def __init__(self, env: 'Environment') -> None:
+ self.env = env
+ self.proc = None # type: T.Optional[Popen]
+ self.type_map = {
+ 'error': lambda data: Error(data['cookie'], data['errorMessage']),
+ 'hello': lambda data: MessageHello(data['supportedProtocolVersions']),
+ 'message': lambda data: Message(data['cookie'], data['message']),
+ 'progress': lambda data: Progress(data['cookie']),
+ 'reply': self.resolve_type_reply,
+ 'signal': lambda data: SignalBase(data['cookie'], data['name'])
+ } # type: T.Dict[str, T.Callable[[T.Dict[str, T.Any]], MessageBase]]
+
+ self.reply_map = {
+ 'handshake': lambda data: ReplyHandShake(data['cookie']),
+ 'configure': lambda data: ReplyConfigure(data['cookie']),
+ 'compute': lambda data: ReplyCompute(data['cookie']),
+ 'cmakeInputs': self.resolve_reply_cmakeInputs,
+ 'codemodel': lambda data: ReplyCodeModel(data),
+ } # type: T.Dict[str, T.Callable[[T.Dict[str, T.Any]], ReplyBase]]
+
+ def readMessageRaw(self) -> T.Dict[str, T.Any]:
+ assert self.proc is not None
+ rawData = []
+ begin = False
+ while self.proc.poll() is None:
+ line = self.proc.stdout.readline()
+ if not line:
+ break
+ line = line.decode('utf-8')
+ line = line.strip()
+
+ if begin and line == CMAKE_SERVER_END_STR:
+ break # End of the message
+ elif begin:
+ rawData += [line]
+ elif line == CMAKE_SERVER_BEGIN_STR:
+ begin = True # Begin of the message
+
+ if rawData:
+ res = json.loads('\n'.join(rawData))
+ assert isinstance(res, dict)
+ for i in res.keys():
+ assert isinstance(i, str)
+ return res
+ raise CMakeException('Failed to read data from the CMake server')
+
+ def readMessage(self) -> MessageBase:
+ raw_data = self.readMessageRaw()
+ if 'type' not in raw_data:
+ raise CMakeException('The "type" attribute is missing from the message')
+ msg_type = raw_data['type']
+ func = self.type_map.get(msg_type, None)
+ if not func:
+            raise CMakeException(f'Received unknown message type "{msg_type}"')
+ for i in CMAKE_MESSAGE_TYPES[msg_type]:
+ if i not in raw_data:
+ raise CMakeException(f'Key "{i}" is missing from CMake server message type {msg_type}')
+ return func(raw_data)
+
+ def writeMessage(self, msg: MessageBase) -> None:
+ raw_data = '\n{}\n{}\n{}\n'.format(CMAKE_SERVER_BEGIN_STR, json.dumps(msg.to_dict(), indent=2), CMAKE_SERVER_END_STR)
+ self.proc.stdin.write(raw_data.encode('ascii'))
+ self.proc.stdin.flush()
+
+ def query(self, request: RequestBase) -> MessageBase:
+ self.writeMessage(request)
+ while True:
+ reply = self.readMessage()
+ if reply.cookie == request.cookie and reply.type in ['reply', 'error']:
+ return reply
+
+ reply.log()
+
+ def query_checked(self, request: RequestBase, message: str) -> MessageBase:
+ reply = self.query(request)
+ h = mlog.green('SUCCEEDED') if reply.type == 'reply' else mlog.red('FAILED')
+ mlog.log(message + ':', h)
+ if reply.type != 'reply':
+ reply.log()
+ raise CMakeException('CMake server query failed')
+ return reply
+
+ def do_handshake(self, src_dir: Path, build_dir: Path, generator: str, vers_major: int, vers_minor: T.Optional[int] = None) -> None:
+ # CMake prints the hello message on startup
+ msg = self.readMessage()
+ if not isinstance(msg, MessageHello):
+            raise CMakeException('Received an unexpected message from the CMake server')
+
+ request = RequestHandShake(src_dir, build_dir, generator, vers_major, vers_minor)
+ self.query_checked(request, 'CMake server handshake')
+
+ def resolve_type_reply(self, data: T.Dict[str, T.Any]) -> ReplyBase:
+ reply_type = data['inReplyTo']
+ func = self.reply_map.get(reply_type, None)
+ if not func:
+            raise CMakeException(f'Received unknown reply type "{reply_type}"')
+ for i in ['cookie'] + CMAKE_REPLY_TYPES[reply_type]:
+ if i not in data:
+                raise CMakeException(f'Key "{i}" is missing from CMake server message type {reply_type}')
+ return func(data)
+
+ def resolve_reply_cmakeInputs(self, data: T.Dict[str, T.Any]) -> ReplyCMakeInputs:
+ files = []
+ for i in data['buildFiles']:
+ for j in i['sources']:
+ files += [CMakeBuildFile(Path(j), i['isCMake'], i['isTemporary'])]
+ return ReplyCMakeInputs(data['cookie'], Path(data['cmakeRootDirectory']), Path(data['sourceDirectory']), files)
+
+ @contextmanager
+ def connect(self, cmake_exe: 'CMakeExecutor') -> T.Generator[None, None, None]:
+ self.startup(cmake_exe)
+ try:
+ yield
+ finally:
+ self.shutdown()
+
+ def startup(self, cmake_exe: 'CMakeExecutor') -> None:
+ if self.proc is not None:
+ raise CMakeException('The CMake server was already started')
+ assert cmake_exe.found()
+
+ mlog.debug('Starting CMake server with CMake', mlog.bold(' '.join(cmake_exe.get_command())), 'version', mlog.cyan(cmake_exe.version()))
+ self.proc = Popen(cmake_exe.get_command() + ['-E', 'server', '--experimental', '--debug'], stdin=PIPE, stdout=PIPE)
+
+ def shutdown(self) -> None:
+ if self.proc is None:
+ return
+
+ mlog.debug('Shutting down the CMake server')
+
+ # Close the pipes to exit
+ self.proc.stdin.close()
+ self.proc.stdout.close()
+
+ # Wait for CMake to finish
+ try:
+ self.proc.wait(timeout=2)
+ except TimeoutExpired:
+ # Terminate CMake if there is a timeout
+ # terminate() may throw a platform specific exception if the process has already
+ # terminated. This may be the case if there is a race condition (CMake exited after
+ # the timeout but before the terminate() call). Additionally, this behavior can
+ # also be triggered on cygwin if CMake crashes.
+ # See https://github.com/mesonbuild/meson/pull/4969#issuecomment-499413233
+ try:
+ self.proc.terminate()
+ except Exception:
+ pass
+
+ self.proc = None
diff --git a/meson/mesonbuild/cmake/common.py b/meson/mesonbuild/cmake/common.py
new file mode 100644
index 000000000..5cc154cb8
--- /dev/null
+++ b/meson/mesonbuild/cmake/common.py
@@ -0,0 +1,334 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains common helper classes and functions for the CMake module.
+
+from ..mesonlib import MesonException, OptionKey
+from .. import mlog
+from pathlib import Path
+import typing as T
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+
+language_map = {
+ 'c': 'C',
+ 'cpp': 'CXX',
+ 'cuda': 'CUDA',
+ 'objc': 'OBJC',
+ 'objcpp': 'OBJCXX',
+ 'cs': 'CSharp',
+ 'java': 'Java',
+ 'fortran': 'Fortran',
+ 'swift': 'Swift',
+}
+
+backend_generator_map = {
+ 'ninja': 'Ninja',
+ 'xcode': 'Xcode',
+ 'vs2010': 'Visual Studio 10 2010',
+ 'vs2012': 'Visual Studio 11 2012',
+ 'vs2013': 'Visual Studio 12 2013',
+ 'vs2015': 'Visual Studio 14 2015',
+ 'vs2017': 'Visual Studio 15 2017',
+ 'vs2019': 'Visual Studio 16 2019',
+}
+
+blacklist_cmake_defs = [
+ 'CMAKE_TOOLCHAIN_FILE',
+ 'CMAKE_PROJECT_INCLUDE',
+ 'MESON_PRELOAD_FILE',
+ 'MESON_PS_CMAKE_CURRENT_BINARY_DIR',
+ 'MESON_PS_CMAKE_CURRENT_SOURCE_DIR',
+ 'MESON_PS_DELAYED_CALLS',
+ 'MESON_PS_LOADED',
+ 'MESON_FIND_ROOT_PATH',
+ 'MESON_CMAKE_SYSROOT',
+ 'MESON_PATHS_LIST',
+ 'MESON_CMAKE_ROOT',
+]
+
+class CMakeException(MesonException):
+ pass
+
+class CMakeBuildFile:
+ def __init__(self, file: Path, is_cmake: bool, is_temp: bool) -> None:
+ self.file = file
+ self.is_cmake = is_cmake
+ self.is_temp = is_temp
+
+ def __repr__(self) -> str:
+ return f'<{self.__class__.__name__}: {self.file}; cmake={self.is_cmake}; temp={self.is_temp}>'
+
+def _flags_to_list(raw: str) -> T.List[str]:
+ # Convert a raw commandline string into a list of strings
+ res = []
+ curr = ''
+ escape = False
+ in_string = False
+ for i in raw:
+ if escape:
+ # If the current char is not a quote, the '\' is probably important
+ if i not in ['"', "'"]:
+ curr += '\\'
+ curr += i
+ escape = False
+ elif i == '\\':
+ escape = True
+ elif i in ['"', "'"]:
+ in_string = not in_string
+ elif i in [' ', '\n']:
+ if in_string:
+ curr += i
+ else:
+ res += [curr]
+ curr = ''
+ else:
+ curr += i
+ res += [curr]
+ res = list(filter(lambda x: len(x) > 0, res))
+ return res
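+
+# Illustrative: _flags_to_list('-DFOO="a b" -O2') == ['-DFOO=a b', '-O2']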
+
+def cmake_get_generator_args(env: 'Environment') -> T.List[str]:
+ backend_name = env.coredata.get_option(OptionKey('backend'))
+ assert isinstance(backend_name, str)
+ assert backend_name in backend_generator_map
+ return ['-G', backend_generator_map[backend_name]]
+
+def cmake_defines_to_args(raw: T.Any, permissive: bool = False) -> T.List[str]:
+ res = [] # type: T.List[str]
+ if not isinstance(raw, list):
+ raw = [raw]
+
+ for i in raw:
+ if not isinstance(i, dict):
+ raise MesonException('Invalid CMake defines. Expected a dict, but got a {}'.format(type(i).__name__))
+ for key, val in i.items():
+ assert isinstance(key, str)
+ if key in blacklist_cmake_defs:
+ mlog.warning('Setting', mlog.bold(key), 'is not supported. See the meson docs for cross compilation support:')
+ mlog.warning(' - URL: https://mesonbuild.com/CMake-module.html#cross-compilation')
+ mlog.warning(' --> Ignoring this option')
+ continue
+ if isinstance(val, (str, int, float)):
+ res += [f'-D{key}={val}']
+ elif isinstance(val, bool):
+ val_str = 'ON' if val else 'OFF'
+ res += [f'-D{key}={val_str}']
+ else:
+                raise MesonException('Type "{}" of "{}" is not supported as a CMake define value'.format(type(val).__name__, key))
+
+ return res
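+
+# Illustrative: cmake_defines_to_args([{'FOO': True, 'BAR': 1}])
+#   == ['-DFOO=ON', '-DBAR=1']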
+
+# TODO: this function will become obsolete once the `cmake_args` kwarg is dropped
+def check_cmake_args(args: T.List[str]) -> T.List[str]:
+ res = [] # type: T.List[str]
+ dis = ['-D' + x for x in blacklist_cmake_defs]
+ assert dis # Ensure that dis is not empty.
+ for i in args:
+ if any([i.startswith(x) for x in dis]):
+ mlog.warning('Setting', mlog.bold(i), 'is not supported. See the meson docs for cross compilation support:')
+ mlog.warning(' - URL: https://mesonbuild.com/CMake-module.html#cross-compilation')
+ mlog.warning(' --> Ignoring this option')
+ continue
+ res += [i]
+ return res
+
+class CMakeInclude:
+ def __init__(self, path: Path, isSystem: bool = False):
+ self.path = path
+ self.isSystem = isSystem
+
+ def __repr__(self) -> str:
+ return f'<CMakeInclude: {self.path} -- isSystem = {self.isSystem}>'
+
+class CMakeFileGroup:
+ def __init__(self, data: T.Dict[str, T.Any]) -> None:
+ self.defines = data.get('defines', '') # type: str
+ self.flags = _flags_to_list(data.get('compileFlags', '')) # type: T.List[str]
+ self.is_generated = data.get('isGenerated', False) # type: bool
+ self.language = data.get('language', 'C') # type: str
+ self.sources = [Path(x) for x in data.get('sources', [])] # type: T.List[Path]
+
+ # Fix the include directories
+ self.includes = [] # type: T.List[CMakeInclude]
+ for i in data.get('includePath', []):
+ if isinstance(i, dict) and 'path' in i:
+ isSystem = i.get('isSystem', False)
+ assert isinstance(isSystem, bool)
+ assert isinstance(i['path'], str)
+ self.includes += [CMakeInclude(Path(i['path']), isSystem)]
+ elif isinstance(i, str):
+ self.includes += [CMakeInclude(Path(i))]
+
+ def log(self) -> None:
+ mlog.log('flags =', mlog.bold(', '.join(self.flags)))
+ mlog.log('defines =', mlog.bold(', '.join(self.defines)))
+ mlog.log('includes =', mlog.bold(', '.join([str(x) for x in self.includes])))
+ mlog.log('is_generated =', mlog.bold('true' if self.is_generated else 'false'))
+ mlog.log('language =', mlog.bold(self.language))
+ mlog.log('sources:')
+ for i in self.sources:
+ with mlog.nested():
+ mlog.log(i.as_posix())
+
+class CMakeTarget:
+ def __init__(self, data: T.Dict[str, T.Any]) -> None:
+ self.artifacts = [Path(x) for x in data.get('artifacts', [])] # type: T.List[Path]
+ self.src_dir = Path(data.get('sourceDirectory', '')) # type: Path
+ self.build_dir = Path(data.get('buildDirectory', '')) # type: Path
+ self.name = data.get('name', '') # type: str
+ self.full_name = data.get('fullName', '') # type: str
+ self.install = data.get('hasInstallRule', False) # type: bool
+ self.install_paths = [Path(x) for x in set(data.get('installPaths', []))] # type: T.List[Path]
+ self.link_lang = data.get('linkerLanguage', '') # type: str
+ self.link_libraries = _flags_to_list(data.get('linkLibraries', '')) # type: T.List[str]
+ self.link_flags = _flags_to_list(data.get('linkFlags', '')) # type: T.List[str]
+ self.link_lang_flags = _flags_to_list(data.get('linkLanguageFlags', '')) # type: T.List[str]
+ # self.link_path = Path(data.get('linkPath', '')) # type: Path
+ self.type = data.get('type', 'EXECUTABLE') # type: str
+ # self.is_generator_provided = data.get('isGeneratorProvided', False) # type: bool
+ self.files = [] # type: T.List[CMakeFileGroup]
+
+ for i in data.get('fileGroups', []):
+ self.files += [CMakeFileGroup(i)]
+
+ def log(self) -> None:
+ mlog.log('artifacts =', mlog.bold(', '.join([x.as_posix() for x in self.artifacts])))
+ mlog.log('src_dir =', mlog.bold(self.src_dir.as_posix()))
+ mlog.log('build_dir =', mlog.bold(self.build_dir.as_posix()))
+ mlog.log('name =', mlog.bold(self.name))
+ mlog.log('full_name =', mlog.bold(self.full_name))
+ mlog.log('install =', mlog.bold('true' if self.install else 'false'))
+ mlog.log('install_paths =', mlog.bold(', '.join([x.as_posix() for x in self.install_paths])))
+ mlog.log('link_lang =', mlog.bold(self.link_lang))
+ mlog.log('link_libraries =', mlog.bold(', '.join(self.link_libraries)))
+ mlog.log('link_flags =', mlog.bold(', '.join(self.link_flags)))
+ mlog.log('link_lang_flags =', mlog.bold(', '.join(self.link_lang_flags)))
+ # mlog.log('link_path =', mlog.bold(self.link_path))
+ mlog.log('type =', mlog.bold(self.type))
+ # mlog.log('is_generator_provided =', mlog.bold('true' if self.is_generator_provided else 'false'))
+ for idx, i in enumerate(self.files):
+ mlog.log(f'Files {idx}:')
+ with mlog.nested():
+ i.log()
+
+class CMakeProject:
+ def __init__(self, data: T.Dict[str, T.Any]) -> None:
+ self.src_dir = Path(data.get('sourceDirectory', '')) # type: Path
+ self.build_dir = Path(data.get('buildDirectory', '')) # type: Path
+ self.name = data.get('name', '') # type: str
+ self.targets = [] # type: T.List[CMakeTarget]
+
+ for i in data.get('targets', []):
+ self.targets += [CMakeTarget(i)]
+
+ def log(self) -> None:
+ mlog.log('src_dir =', mlog.bold(self.src_dir.as_posix()))
+ mlog.log('build_dir =', mlog.bold(self.build_dir.as_posix()))
+ mlog.log('name =', mlog.bold(self.name))
+ for idx, i in enumerate(self.targets):
+ mlog.log(f'Target {idx}:')
+ with mlog.nested():
+ i.log()
+
+class CMakeConfiguration:
+ def __init__(self, data: T.Dict[str, T.Any]) -> None:
+ self.name = data.get('name', '') # type: str
+ self.projects = [] # type: T.List[CMakeProject]
+ for i in data.get('projects', []):
+ self.projects += [CMakeProject(i)]
+
+ def log(self) -> None:
+ mlog.log('name =', mlog.bold(self.name))
+ for idx, i in enumerate(self.projects):
+ mlog.log(f'Project {idx}:')
+ with mlog.nested():
+ i.log()
+
+class SingleTargetOptions:
+ def __init__(self) -> None:
+ self.opts = {} # type: T.Dict[str, str]
+ self.lang_args = {} # type: T.Dict[str, T.List[str]]
+ self.link_args = [] # type: T.List[str]
+ self.install = 'preserve'
+
+ def set_opt(self, opt: str, val: str) -> None:
+ self.opts[opt] = val
+
+ def append_args(self, lang: str, args: T.List[str]) -> None:
+ if lang not in self.lang_args:
+ self.lang_args[lang] = []
+ self.lang_args[lang] += args
+
+ def append_link_args(self, args: T.List[str]) -> None:
+ self.link_args += args
+
+ def set_install(self, install: bool) -> None:
+ self.install = 'true' if install else 'false'
+
+ def get_override_options(self, initial: T.List[str]) -> T.List[str]:
+ res = [] # type: T.List[str]
+ for i in initial:
+ opt = i[:i.find('=')]
+ if opt not in self.opts:
+ res += [i]
+ res += [f'{k}={v}' for k, v in self.opts.items()]
+ return res
+
+ def get_compile_args(self, lang: str, initial: T.List[str]) -> T.List[str]:
+ if lang in self.lang_args:
+ return initial + self.lang_args[lang]
+ return initial
+
+ def get_link_args(self, initial: T.List[str]) -> T.List[str]:
+ return initial + self.link_args
+
+ def get_install(self, initial: bool) -> bool:
+ return {'preserve': initial, 'true': True, 'false': False}[self.install]
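+
+    # Merge semantics, illustrative: with self.opts == {'a': '3'},
+    # get_override_options(['a=1', 'b=2']) returns ['b=2', 'a=3']
+    # (the per-object value replaces the initial one).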
+
+class TargetOptions:
+ def __init__(self) -> None:
+ self.global_options = SingleTargetOptions()
+ self.target_options = {} # type: T.Dict[str, SingleTargetOptions]
+
+ def __getitem__(self, tgt: str) -> SingleTargetOptions:
+ if tgt not in self.target_options:
+ self.target_options[tgt] = SingleTargetOptions()
+ return self.target_options[tgt]
+
+ def get_override_options(self, tgt: str, initial: T.List[str]) -> T.List[str]:
+ initial = self.global_options.get_override_options(initial)
+ if tgt in self.target_options:
+ initial = self.target_options[tgt].get_override_options(initial)
+ return initial
+
+ def get_compile_args(self, tgt: str, lang: str, initial: T.List[str]) -> T.List[str]:
+ initial = self.global_options.get_compile_args(lang, initial)
+ if tgt in self.target_options:
+ initial = self.target_options[tgt].get_compile_args(lang, initial)
+ return initial
+
+ def get_link_args(self, tgt: str, initial: T.List[str]) -> T.List[str]:
+ initial = self.global_options.get_link_args(initial)
+ if tgt in self.target_options:
+ initial = self.target_options[tgt].get_link_args(initial)
+ return initial
+
+ def get_install(self, tgt: str, initial: bool) -> bool:
+ initial = self.global_options.get_install(initial)
+ if tgt in self.target_options:
+ initial = self.target_options[tgt].get_install(initial)
+ return initial
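+
+# Illustrative usage (names hypothetical):
+#   opts = TargetOptions()
+#   opts['mylib'].set_opt('cpp_std', 'c++17')      # auto-creates the entry
+#   opts.get_override_options('mylib', [])         # -> ['cpp_std=c++17']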
diff --git a/meson/mesonbuild/cmake/data/preload.cmake b/meson/mesonbuild/cmake/data/preload.cmake
new file mode 100644
index 000000000..234860b75
--- /dev/null
+++ b/meson/mesonbuild/cmake/data/preload.cmake
@@ -0,0 +1,82 @@
+if(MESON_PS_LOADED)
+ return()
+endif()
+
+set(MESON_PS_LOADED ON)
+
+cmake_policy(PUSH)
+cmake_policy(SET CMP0054 NEW) # https://cmake.org/cmake/help/latest/policy/CMP0054.html
+
+# Dummy macros that have a special meaning in the meson code
+macro(meson_ps_execute_delayed_calls)
+endmacro()
+
+macro(meson_ps_reload_vars)
+endmacro()
+
+macro(meson_ps_disabled_function)
+ message(WARNING "The function '${ARGV0}' is disabled in the context of CMake subprojects.\n"
+ "This should not be an issue but may lead to compilation errors.")
+endmacro()
+
+# Helper macro to inspect the current CMake state
+macro(meson_ps_inspect_vars)
+ set(MESON_PS_CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}")
+ set(MESON_PS_CMAKE_CURRENT_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}")
+ meson_ps_execute_delayed_calls()
+endmacro()
+
+
+# Override some system functions with custom code and forward the args
+# to the original function
+macro(add_custom_command)
+ meson_ps_inspect_vars()
+ _add_custom_command(${ARGV})
+endmacro()
+
+macro(add_custom_target)
+ meson_ps_inspect_vars()
+ _add_custom_target(${ARGV})
+endmacro()
+
+macro(set_property)
+ meson_ps_inspect_vars()
+ _set_property(${ARGV})
+endmacro()
+
+function(set_source_files_properties)
+ set(FILES)
+ set(I 0)
+ set(PROPERTIES OFF)
+
+ while(I LESS ARGC)
+ if(NOT PROPERTIES)
+ if("${ARGV${I}}" STREQUAL "PROPERTIES")
+ set(PROPERTIES ON)
+ else()
+ list(APPEND FILES "${ARGV${I}}")
+ endif()
+
+ math(EXPR I "${I} + 1")
+ else()
+ set(ID_IDX ${I})
+ math(EXPR PROP_IDX "${ID_IDX} + 1")
+
+ set(ID "${ARGV${ID_IDX}}")
+ set(PROP "${ARGV${PROP_IDX}}")
+
+ set_property(SOURCE ${FILES} PROPERTY "${ID}" "${PROP}")
+ math(EXPR I "${I} + 2")
+ endif()
+ endwhile()
+endfunction()
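+
+# Illustrative forwarding, assuming typical usage:
+#   set_source_files_properties(a.c b.c PROPERTIES GENERATED ON)
+# ends up calling
+#   set_property(SOURCE a.c b.c PROPERTY "GENERATED" "ON")
+# so the set_property() override above records the CMake state for meson.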
+
+# Disable some functions that would mess up the CMake meson integration
+macro(target_precompile_headers)
+ meson_ps_disabled_function(target_precompile_headers)
+endmacro()
+
+set(MESON_PS_DELAYED_CALLS add_custom_command;add_custom_target;set_property)
+meson_ps_reload_vars()
+
+cmake_policy(POP)
diff --git a/meson/mesonbuild/cmake/executor.py b/meson/mesonbuild/cmake/executor.py
new file mode 100644
index 000000000..7b06f2623
--- /dev/null
+++ b/meson/mesonbuild/cmake/executor.py
@@ -0,0 +1,246 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the logic for finding and running the CMake executable.
+
+import subprocess as S
+from pathlib import Path
+from threading import Thread
+import typing as T
+import re
+import os
+
+from .. import mlog
+from ..environment import Environment
+from ..mesonlib import PerMachine, Popen_safe, version_compare, MachineChoice, is_windows, OptionKey
+from ..programs import find_external_program, NonExistingExternalProgram
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+ from ..programs import ExternalProgram
+
+TYPE_result = T.Tuple[int, T.Optional[str], T.Optional[str]]
+TYPE_cache_key = T.Tuple[str, T.Tuple[str, ...], str, T.FrozenSet[T.Tuple[str, str]]]
+
+class CMakeExecutor:
+ # The class's copy of the CMake path. Avoids having to search for it
+ # multiple times in the same Meson invocation.
+ class_cmakebin = PerMachine(None, None) # type: PerMachine[T.Optional[ExternalProgram]]
+ class_cmakevers = PerMachine(None, None) # type: PerMachine[T.Optional[str]]
+ class_cmake_cache = {} # type: T.Dict[T.Any, TYPE_result]
+
+ def __init__(self, environment: 'Environment', version: str, for_machine: MachineChoice, silent: bool = False):
+ self.min_version = version
+ self.environment = environment
+ self.for_machine = for_machine
+ self.cmakebin, self.cmakevers = self.find_cmake_binary(self.environment, silent=silent)
+ self.always_capture_stderr = True
+ self.print_cmout = False
+ self.prefix_paths = [] # type: T.List[str]
+ self.extra_cmake_args = [] # type: T.List[str]
+
+ if self.cmakebin is None:
+ return
+
+ if not version_compare(self.cmakevers, self.min_version):
+ mlog.warning(
+ 'The version of CMake', mlog.bold(self.cmakebin.get_path()),
+ 'is', mlog.bold(self.cmakevers), 'but version', mlog.bold(self.min_version),
+ 'is required')
+ self.cmakebin = None
+ return
+
+ self.prefix_paths = self.environment.coredata.options[OptionKey('cmake_prefix_path', machine=self.for_machine)].value
+ if self.prefix_paths:
+ self.extra_cmake_args += ['-DCMAKE_PREFIX_PATH={}'.format(';'.join(self.prefix_paths))]
+
+ def find_cmake_binary(self, environment: Environment, silent: bool = False) -> T.Tuple[T.Optional['ExternalProgram'], T.Optional[str]]:
+ # Only search for CMake the first time and store the result in the class
+ # definition
+ if isinstance(CMakeExecutor.class_cmakebin[self.for_machine], NonExistingExternalProgram):
+ mlog.debug(f'CMake binary for {self.for_machine} is cached as not found')
+ return None, None
+ elif CMakeExecutor.class_cmakebin[self.for_machine] is not None:
+ mlog.debug(f'CMake binary for {self.for_machine} is cached.')
+ else:
+ assert CMakeExecutor.class_cmakebin[self.for_machine] is None
+
+ mlog.debug(f'CMake binary for {self.for_machine} is not cached')
+ for potential_cmakebin in find_external_program(
+ environment, self.for_machine, 'cmake', 'CMake',
+ environment.default_cmake, allow_default_for_cross=False):
+ version_if_ok = self.check_cmake(potential_cmakebin)
+ if not version_if_ok:
+ continue
+ if not silent:
+ mlog.log('Found CMake:', mlog.bold(potential_cmakebin.get_path()),
+ f'({version_if_ok})')
+ CMakeExecutor.class_cmakebin[self.for_machine] = potential_cmakebin
+ CMakeExecutor.class_cmakevers[self.for_machine] = version_if_ok
+ break
+ else:
+ if not silent:
+ mlog.log('Found CMake:', mlog.red('NO'))
+ # Set to False instead of None to signify that we've already
+ # searched for it and not found it
+ CMakeExecutor.class_cmakebin[self.for_machine] = NonExistingExternalProgram()
+ CMakeExecutor.class_cmakevers[self.for_machine] = None
+ return None, None
+
+ return CMakeExecutor.class_cmakebin[self.for_machine], CMakeExecutor.class_cmakevers[self.for_machine]
+
+ def check_cmake(self, cmakebin: 'ExternalProgram') -> T.Optional[str]:
+ if not cmakebin.found():
+ mlog.log(f'Did not find CMake {cmakebin.name!r}')
+ return None
+ try:
+ p, out = Popen_safe(cmakebin.get_command() + ['--version'])[0:2]
+ if p.returncode != 0:
+ mlog.warning('Found CMake {!r} but couldn\'t run it'
+ ''.format(' '.join(cmakebin.get_command())))
+ return None
+ except FileNotFoundError:
+ mlog.warning('We thought we found CMake {!r} but now it\'s not there. How odd!'
+ ''.format(' '.join(cmakebin.get_command())))
+ return None
+ except PermissionError:
+ msg = 'Found CMake {!r} but didn\'t have permissions to run it.'.format(' '.join(cmakebin.get_command()))
+ if not is_windows():
+ msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.'
+ mlog.warning(msg)
+ return None
+        vers_match = re.search(r'(cmake|cmake3)\s*version\s*([\d.]+)', out)
+        if vers_match is None:
+            mlog.warning('Found CMake {!r} but could not parse its version output'
+                         ''.format(' '.join(cmakebin.get_command())))
+            return None
+        return vers_match.group(2)
+
+ def set_exec_mode(self, print_cmout: T.Optional[bool] = None, always_capture_stderr: T.Optional[bool] = None) -> None:
+ if print_cmout is not None:
+ self.print_cmout = print_cmout
+ if always_capture_stderr is not None:
+ self.always_capture_stderr = always_capture_stderr
+
+ def _cache_key(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_cache_key:
+ fenv = frozenset(env.items()) if env is not None else frozenset()
+ targs = tuple(args)
+ return (self.cmakebin.get_path(), targs, build_dir.as_posix(), fenv)
+
+ def _call_cmout_stderr(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result:
+ cmd = self.cmakebin.get_command() + args
+ proc = S.Popen(cmd, stdout=S.PIPE, stderr=S.PIPE, cwd=str(build_dir), env=env) # TODO [PYTHON_37]: drop Path conversion
+
+ # stdout and stderr MUST be read at the same time to avoid pipe
+ # blocking issues. The easiest way to do this is with a separate
+ # thread for one of the pipes.
+ def print_stdout() -> None:
+ while True:
+ line = proc.stdout.readline()
+ if not line:
+ break
+ mlog.log(line.decode(errors='ignore').strip('\n'))
+ proc.stdout.close()
+
+ t = Thread(target=print_stdout)
+ t.start()
+
+ try:
+            # Read stderr line by line and log non-trace lines
+ raw_trace = ''
+ tline_start_reg = re.compile(r'^\s*(.*\.(cmake|txt))\(([0-9]+)\):\s*(\w+)\(.*$')
+ inside_multiline_trace = False
+ while True:
+ line_raw = proc.stderr.readline()
+ if not line_raw:
+ break
+ line = line_raw.decode(errors='ignore')
+ if tline_start_reg.match(line):
+ raw_trace += line
+ inside_multiline_trace = not line.endswith(' )\n')
+ elif inside_multiline_trace:
+ raw_trace += line
+ else:
+ mlog.warning(line.strip('\n'))
+
+ finally:
+ proc.stderr.close()
+ t.join()
+ proc.wait()
+
+ return proc.returncode, None, raw_trace
+
+ def _call_cmout(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result:
+ cmd = self.cmakebin.get_command() + args
+ proc = S.Popen(cmd, stdout=S.PIPE, stderr=S.STDOUT, cwd=str(build_dir), env=env) # TODO [PYTHON_37]: drop Path conversion
+ while True:
+ line = proc.stdout.readline()
+ if not line:
+ break
+ mlog.log(line.decode(errors='ignore').strip('\n'))
+ proc.stdout.close()
+ proc.wait()
+ return proc.returncode, None, None
+
+ def _call_quiet(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result:
+ build_dir.mkdir(parents=True, exist_ok=True)
+ cmd = self.cmakebin.get_command() + args
+ ret = S.run(cmd, env=env, cwd=str(build_dir), close_fds=False,
+ stdout=S.PIPE, stderr=S.PIPE, universal_newlines=False) # TODO [PYTHON_37]: drop Path conversion
+ rc = ret.returncode
+ out = ret.stdout.decode(errors='ignore')
+ err = ret.stderr.decode(errors='ignore')
+ return rc, out, err
+
+ def _call_impl(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result:
+ mlog.debug(f'Calling CMake ({self.cmakebin.get_command()}) in {build_dir} with:')
+ for i in args:
+ mlog.debug(f' - "{i}"')
+ if not self.print_cmout:
+ return self._call_quiet(args, build_dir, env)
+ else:
+ if self.always_capture_stderr:
+ return self._call_cmout_stderr(args, build_dir, env)
+ else:
+ return self._call_cmout(args, build_dir, env)
+
+ def call(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]] = None, disable_cache: bool = False) -> TYPE_result:
+ if env is None:
+ env = os.environ.copy()
+
+ args = args + self.extra_cmake_args
+ if disable_cache:
+ return self._call_impl(args, build_dir, env)
+
+ # First check if cached, if not call the real cmake function
+ cache = CMakeExecutor.class_cmake_cache
+ key = self._cache_key(args, build_dir, env)
+ if key not in cache:
+ cache[key] = self._call_impl(args, build_dir, env)
+ return cache[key]
+
+ def found(self) -> bool:
+ return self.cmakebin is not None
+
+ def version(self) -> str:
+ return self.cmakevers
+
+ def executable_path(self) -> str:
+ return self.cmakebin.get_path()
+
+ def get_command(self) -> T.List[str]:
+ return self.cmakebin.get_command()
+
+ def get_cmake_prefix_paths(self) -> T.List[str]:
+ return self.prefix_paths
+
+ def machine_choice(self) -> MachineChoice:
+ return self.for_machine
diff --git a/meson/mesonbuild/cmake/fileapi.py b/meson/mesonbuild/cmake/fileapi.py
new file mode 100644
index 000000000..5d4d01a13
--- /dev/null
+++ b/meson/mesonbuild/cmake/fileapi.py
@@ -0,0 +1,320 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .common import CMakeException, CMakeBuildFile, CMakeConfiguration
+import typing as T
+from .. import mlog
+from pathlib import Path
+import json
+import re
+
+STRIP_KEYS = ['cmake', 'reply', 'backtrace', 'backtraceGraph', 'version']
+
+class CMakeFileAPI:
+ def __init__(self, build_dir: Path):
+ self.build_dir = build_dir
+ self.api_base_dir = self.build_dir / '.cmake' / 'api' / 'v1'
+ self.request_dir = self.api_base_dir / 'query' / 'client-meson'
+ self.reply_dir = self.api_base_dir / 'reply'
+ self.cmake_sources = [] # type: T.List[CMakeBuildFile]
+ self.cmake_configurations = [] # type: T.List[CMakeConfiguration]
+ self.kind_resolver_map = {
+ 'codemodel': self._parse_codemodel,
+ 'cmakeFiles': self._parse_cmakeFiles,
+ }
+
+ def get_cmake_sources(self) -> T.List[CMakeBuildFile]:
+ return self.cmake_sources
+
+ def get_cmake_configurations(self) -> T.List[CMakeConfiguration]:
+ return self.cmake_configurations
+
+ def setup_request(self) -> None:
+ self.request_dir.mkdir(parents=True, exist_ok=True)
+
+ query = {
+ 'requests': [
+ {'kind': 'codemodel', 'version': {'major': 2, 'minor': 0}},
+ {'kind': 'cmakeFiles', 'version': {'major': 1, 'minor': 0}},
+ ]
+ }
+
+ query_file = self.request_dir / 'query.json'
+ query_file.write_text(json.dumps(query, indent=2), encoding='utf-8')
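+        # The query file ends up at (relative to the build directory):
+        #   .cmake/api/v1/query/client-meson/query.json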
+
+ def load_reply(self) -> None:
+ if not self.reply_dir.is_dir():
+ raise CMakeException('No response from the CMake file API')
+
+ root = None
+ reg_index = re.compile(r'^index-.*\.json$')
+ for i in self.reply_dir.iterdir():
+ if reg_index.match(i.name):
+ root = i
+ break
+
+ if not root:
+ raise CMakeException('Failed to find the CMake file API index')
+
+ index = self._reply_file_content(root) # Load the root index
+ index = self._strip_data(index) # Avoid loading duplicate files
+ index = self._resolve_references(index) # Load everything
+ index = self._strip_data(index) # Strip unused data (again for loaded files)
+
+ # Debug output
+ debug_json = self.build_dir / '..' / 'fileAPI.json'
+ debug_json = debug_json.resolve()
+ debug_json.write_text(json.dumps(index, indent=2), encoding='utf-8')
+ mlog.cmd_ci_include(debug_json.as_posix())
+
+ # parse the JSON
+        for i in index['objects']:
+            assert isinstance(i, dict)
+            assert 'kind' in i
+            assert i['kind'] in self.kind_resolver_map
+
+ self.kind_resolver_map[i['kind']](i)
+
+ def _parse_codemodel(self, data: T.Dict[str, T.Any]) -> None:
+        assert 'configurations' in data
+        assert 'paths' in data
+
+ source_dir = data['paths']['source']
+ build_dir = data['paths']['build']
+
+        # The file API output differs quite a bit from the server
+        # output. It is flatter than the server output and makes
+        # heavy use of references. Here these references are
+        # resolved and the resulting data structure is identical
+        # to the CMake server output.
+
+ def helper_parse_dir(dir_entry: T.Dict[str, T.Any]) -> T.Tuple[Path, Path]:
+ src_dir = Path(dir_entry.get('source', '.'))
+ bld_dir = Path(dir_entry.get('build', '.'))
+ src_dir = src_dir if src_dir.is_absolute() else source_dir / src_dir
+ bld_dir = bld_dir if bld_dir.is_absolute() else build_dir / bld_dir
+ src_dir = src_dir.resolve()
+ bld_dir = bld_dir.resolve()
+
+ return src_dir, bld_dir
+
+ def parse_sources(comp_group: T.Dict[str, T.Any], tgt: T.Dict[str, T.Any]) -> T.Tuple[T.List[Path], T.List[Path], T.List[int]]:
+ gen = []
+ src = []
+ idx = []
+
+ src_list_raw = tgt.get('sources', [])
+ for i in comp_group.get('sourceIndexes', []):
+ if i >= len(src_list_raw) or 'path' not in src_list_raw[i]:
+ continue
+ if src_list_raw[i].get('isGenerated', False):
+ gen += [Path(src_list_raw[i]['path'])]
+ else:
+ src += [Path(src_list_raw[i]['path'])]
+ idx += [i]
+
+ return src, gen, idx
+
+ def parse_target(tgt: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]:
+ src_dir, bld_dir = helper_parse_dir(cnf.get('paths', {}))
+
+ # Parse install paths (if present)
+ install_paths = []
+ if 'install' in tgt:
+ prefix = Path(tgt['install']['prefix']['path'])
+ install_paths = [prefix / x['path'] for x in tgt['install']['destinations']]
+ install_paths = list(set(install_paths))
+
+            # At first glance it looks really nice that the CMake devs have
+            # decided to use arrays for the linker flags. However, this feeling
+            # soon turns into despair when you realize that there is usually
+            # only one entry per type, and we still have to do manual string splitting.
+ link_flags = []
+ link_libs = []
+ for i in tgt.get('link', {}).get('commandFragments', []):
+ if i['role'] == 'flags':
+ link_flags += [i['fragment']]
+ elif i['role'] == 'libraries':
+ link_libs += [i['fragment']]
+ elif i['role'] == 'libraryPath':
+ link_flags += ['-L{}'.format(i['fragment'])]
+ elif i['role'] == 'frameworkPath':
+ link_flags += ['-F{}'.format(i['fragment'])]
+ for i in tgt.get('archive', {}).get('commandFragments', []):
+ if i['role'] == 'flags':
+ link_flags += [i['fragment']]
+
+ # TODO The `dependencies` entry is new in the file API.
+ # maybe we can make use of that in addition to the
+ # implicit dependency detection
+ tgt_data = {
+ 'artifacts': [Path(x.get('path', '')) for x in tgt.get('artifacts', [])],
+ 'sourceDirectory': src_dir,
+ 'buildDirectory': bld_dir,
+ 'name': tgt.get('name', ''),
+ 'fullName': tgt.get('nameOnDisk', ''),
+ 'hasInstallRule': 'install' in tgt,
+ 'installPaths': install_paths,
+ 'linkerLanguage': tgt.get('link', {}).get('language', 'CXX'),
+                'linkLibraries': ' '.join(link_libs), # See the previous comment block for why we join the array
+                'linkFlags': ' '.join(link_flags), # See the previous comment block for why we join the array
+ 'type': tgt.get('type', 'EXECUTABLE'),
+ 'fileGroups': [],
+ }
+
+ processed_src_idx = []
+ for cg in tgt.get('compileGroups', []):
+                # Again, why an array when there is usually only one element
+                # and the arguments are separated by spaces...
+ flags = []
+ for i in cg.get('compileCommandFragments', []):
+ flags += [i['fragment']]
+
+ cg_data = {
+ 'defines': [x.get('define', '') for x in cg.get('defines', [])],
+ 'compileFlags': ' '.join(flags),
+ 'language': cg.get('language', 'C'),
+ 'isGenerated': None, # Set later, flag is stored per source file
+ 'sources': [],
+ 'includePath': cg.get('includes', []),
+ }
+
+ normal_src, generated_src, src_idx = parse_sources(cg, tgt)
+ if normal_src:
+ cg_data = dict(cg_data)
+ cg_data['isGenerated'] = False
+ cg_data['sources'] = normal_src
+ tgt_data['fileGroups'] += [cg_data]
+ if generated_src:
+ cg_data = dict(cg_data)
+ cg_data['isGenerated'] = True
+ cg_data['sources'] = generated_src
+ tgt_data['fileGroups'] += [cg_data]
+ processed_src_idx += src_idx
+
+            # Object libraries have no compile groups, only source groups.
+            # So we add all the source files that were not handled in the
+            # previous loop to a dummy source group.
+ normal_src = []
+ generated_src = []
+ for idx, src in enumerate(tgt.get('sources', [])):
+ if idx in processed_src_idx:
+ continue
+
+ if src.get('isGenerated', False):
+ generated_src += [src['path']]
+ else:
+ normal_src += [src['path']]
+
+ if normal_src:
+ tgt_data['fileGroups'] += [{
+ 'isGenerated': False,
+ 'sources': normal_src,
+ }]
+ if generated_src:
+ tgt_data['fileGroups'] += [{
+ 'isGenerated': True,
+ 'sources': generated_src,
+ }]
+ return tgt_data
+
+ def parse_project(pro: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]:
+ # Only look at the first directory specified in directoryIndexes
+ # TODO Figure out what the other indexes are there for
+ p_src_dir = source_dir
+ p_bld_dir = build_dir
+ try:
+ p_src_dir, p_bld_dir = helper_parse_dir(cnf['directories'][pro['directoryIndexes'][0]])
+ except (IndexError, KeyError):
+ pass
+
+ pro_data = {
+ 'name': pro.get('name', ''),
+ 'sourceDirectory': p_src_dir,
+ 'buildDirectory': p_bld_dir,
+ 'targets': [],
+ }
+
+ for ref in pro.get('targetIndexes', []):
+ tgt = {}
+ try:
+ tgt = cnf['targets'][ref]
+ except (IndexError, KeyError):
+ pass
+ pro_data['targets'] += [parse_target(tgt)]
+
+ return pro_data
+
+ for cnf in data.get('configurations', []):
+ cnf_data = {
+ 'name': cnf.get('name', ''),
+ 'projects': [],
+ }
+
+ for pro in cnf.get('projects', []):
+ cnf_data['projects'] += [parse_project(pro)]
+
+ self.cmake_configurations += [CMakeConfiguration(cnf_data)]
+
+ def _parse_cmakeFiles(self, data: T.Dict[str, T.Any]) -> None:
+ assert 'inputs' in data
+ assert 'paths' in data
+
+ src_dir = Path(data['paths']['source'])
+
+ for i in data['inputs']:
+ path = Path(i['path'])
+ path = path if path.is_absolute() else src_dir / path
+ self.cmake_sources += [CMakeBuildFile(path, i.get('isCMake', False), i.get('isGenerated', False))]
+
+ def _strip_data(self, data: T.Any) -> T.Any:
+ if isinstance(data, list):
+ for idx, i in enumerate(data):
+ data[idx] = self._strip_data(i)
+
+ elif isinstance(data, dict):
+ new = {}
+ for key, val in data.items():
+ if key not in STRIP_KEYS:
+ new[key] = self._strip_data(val)
+ data = new
+
+ return data
+
+ def _resolve_references(self, data: T.Any) -> T.Any:
+ if isinstance(data, list):
+ for idx, i in enumerate(data):
+ data[idx] = self._resolve_references(i)
+
+ elif isinstance(data, dict):
+ # Check for the "magic" reference entry and insert
+ # it into the root data dict
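+            # (illustrative: an entry such as
+            #   {'jsonFile': 'codemodel-v2-<hash>.json', 'kind': 'codemodel'}
+            # is expanded in place with the parsed contents of that reply file)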
+ if 'jsonFile' in data:
+ data.update(self._reply_file_content(data['jsonFile']))
+
+ for key, val in data.items():
+ data[key] = self._resolve_references(val)
+
+ return data
+
+ def _reply_file_content(self, filename: Path) -> T.Dict[str, T.Any]:
+ real_path = self.reply_dir / filename
+ if not real_path.exists():
+ raise CMakeException(f'File "{real_path}" does not exist')
+
+ data = json.loads(real_path.read_text(encoding='utf-8'))
+ assert isinstance(data, dict)
+ for i in data.keys():
+ assert isinstance(i, str)
+ return data
diff --git a/meson/mesonbuild/cmake/generator.py b/meson/mesonbuild/cmake/generator.py
new file mode 100644
index 000000000..848fdf944
--- /dev/null
+++ b/meson/mesonbuild/cmake/generator.py
@@ -0,0 +1,134 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mesonlib
+import typing as T
+
+def parse_generator_expressions(raw: str) -> str:
+ '''Parse CMake generator expressions
+
+    Most generator expressions are simply ignored for
+    simplicity; however, some are required for common
+    use cases.
+ '''
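+    # A few evaluations with this implementation (illustrative):
+    #   parse_generator_expressions('$<1:-pthread>')          -> '-pthread'
+    #   parse_generator_expressions('$<BOOL:OFF>')            -> '0'
+    #   parse_generator_expressions('$<INSTALL_INTERFACE:x>') -> ''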
+
+ # Early abort if no generator expression present
+ if '$<' not in raw:
+ return raw
+
+ out = '' # type: str
+ i = 0 # type: int
+
+ def equal(arg: str) -> str:
+ col_pos = arg.find(',')
+ if col_pos < 0:
+ return '0'
+ else:
+ return '1' if arg[:col_pos] == arg[col_pos + 1:] else '0'
+
+ def vers_comp(op: str, arg: str) -> str:
+ col_pos = arg.find(',')
+ if col_pos < 0:
+ return '0'
+ else:
+ return '1' if mesonlib.version_compare(arg[:col_pos], '{}{}'.format(op, arg[col_pos + 1:])) else '0'
+
+ supported = {
+ # Boolean functions
+ 'BOOL': lambda x: '0' if x.upper() in ['0', 'FALSE', 'OFF', 'N', 'NO', 'IGNORE', 'NOTFOUND'] or x.endswith('-NOTFOUND') else '1',
+ 'AND': lambda x: '1' if all([y == '1' for y in x.split(',')]) else '0',
+ 'OR': lambda x: '1' if any([y == '1' for y in x.split(',')]) else '0',
+ 'NOT': lambda x: '0' if x == '1' else '1',
+
+ '0': lambda x: '',
+ '1': lambda x: x,
+
+ # String operations
+ 'STREQUAL': equal,
+ 'EQUAL': equal,
+ 'VERSION_LESS': lambda x: vers_comp('<', x),
+ 'VERSION_GREATER': lambda x: vers_comp('>', x),
+ 'VERSION_EQUAL': lambda x: vers_comp('=', x),
+ 'VERSION_LESS_EQUAL': lambda x: vers_comp('<=', x),
+ 'VERSION_GREATER_EQUAL': lambda x: vers_comp('>=', x),
+
+ # String modification
+ 'LOWER_CASE': lambda x: x.lower(),
+ 'UPPER_CASE': lambda x: x.upper(),
+
+        # Always assume the BUILD_INTERFACE is valid.
+        # INSTALL_INTERFACE is always invalid for subprojects and
+        # should also never appear in the CMake config files used
+        # for dependencies.
+ 'INSTALL_INTERFACE': lambda x: '',
+ 'BUILD_INTERFACE': lambda x: x,
+
+ # Constants
+ 'ANGLE-R': lambda x: '>',
+ 'COMMA': lambda x: ',',
+ 'SEMICOLON': lambda x: ';',
+ } # type: T.Dict[str, T.Callable[[str], str]]
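+
+    # Nested expressions are evaluated inside out; e.g. (illustrative)
+    # '$<$<VERSION_LESS:1.2.3,2.0.0>:-DOLD_API>' first reduces the inner
+    # expression to '1', and '$<1:-DOLD_API>' then yields '-DOLD_API'.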
+
+ # Recursively evaluate generator expressions
+ def eval_generator_expressions() -> str:
+ nonlocal i
+ i += 2
+
+ func = '' # type: str
+ args = '' # type: str
+ res = '' # type: str
+ exp = '' # type: str
+
+ # Determine the body of the expression
+ while i < len(raw):
+ if raw[i] == '>':
+ # End of the generator expression
+ break
+ elif i < len(raw) - 1 and raw[i] == '$' and raw[i + 1] == '<':
+ # Nested generator expression
+ exp += eval_generator_expressions()
+ else:
+ # Generator expression body
+ exp += raw[i]
+
+ i += 1
+
+ # Split the expression into a function and arguments part
+ col_pos = exp.find(':')
+ if col_pos < 0:
+ func = exp
+ else:
+ func = exp[:col_pos]
+ args = exp[col_pos + 1:]
+
+ func = func.strip()
+ args = args.strip()
+
+ # Evaluate the function
+ if func in supported:
+ res = supported[func](args)
+
+ return res
+
+ while i < len(raw):
+ if i < len(raw) - 1 and raw[i] == '$' and raw[i + 1] == '<':
+ # Generator expression detected --> try resolving it
+ out += eval_generator_expressions()
+ else:
+ # Normal string, leave unchanged
+ out += raw[i]
+
+ i += 1
+
+ return out
diff --git a/meson/mesonbuild/cmake/interpreter.py b/meson/mesonbuild/cmake/interpreter.py
new file mode 100644
index 000000000..fe66becb9
--- /dev/null
+++ b/meson/mesonbuild/cmake/interpreter.py
@@ -0,0 +1,1369 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module contains the functionality needed to run CMake on a
+# subproject and translate the resulting targets into a Meson AST.
+
+from .common import CMakeException, CMakeTarget, TargetOptions, CMakeConfiguration, language_map, backend_generator_map, cmake_get_generator_args, check_cmake_args
+from .client import CMakeClient, RequestCMakeInputs, RequestConfigure, RequestCompute, RequestCodeModel, ReplyCMakeInputs, ReplyCodeModel
+from .fileapi import CMakeFileAPI
+from .executor import CMakeExecutor
+from .toolchain import CMakeToolchain, CMakeExecScope
+from .traceparser import CMakeTraceParser, CMakeGeneratorTarget
+from .. import mlog, mesonlib
+from ..mesonlib import MachineChoice, OrderedSet, version_compare, path_is_in_root, relative_to_if_possible, OptionKey
+from ..mesondata import mesondata
+from ..compilers.compilers import assembler_suffixes, lang_suffixes, header_suffixes, obj_suffixes, lib_suffixes, is_header
+from ..programs import ExternalProgram
+from ..coredata import FORBIDDEN_TARGET_NAMES
+from enum import Enum
+from functools import lru_cache
+from pathlib import Path
+import typing as T
+import re
+from os import environ
+
+from ..mparser import (
+ Token,
+ BaseNode,
+ CodeBlockNode,
+ FunctionNode,
+ ArrayNode,
+ ArgumentNode,
+ AssignmentNode,
+ BooleanNode,
+ StringNode,
+ IdNode,
+ IndexNode,
+ MethodNode,
+ NumberNode,
+)
+
+
+if T.TYPE_CHECKING:
+ from .._typing import ImmutableListProtocol
+ from ..build import Build
+ from ..backend.backends import Backend
+ from ..environment import Environment
+
+TYPE_mixed = T.Union[str, int, bool, Path, BaseNode]
+TYPE_mixed_list = T.Union[TYPE_mixed, T.Sequence[TYPE_mixed]]
+TYPE_mixed_kwargs = T.Dict[str, TYPE_mixed_list]
+
+# Disable all warnings automatically enabled with --trace and friends
+# See https://cmake.org/cmake/help/latest/variable/CMAKE_POLICY_WARNING_CMPNNNN.html
+disable_policy_warnings = [
+ 'CMP0025',
+ 'CMP0047',
+ 'CMP0056',
+ 'CMP0060',
+ 'CMP0065',
+ 'CMP0066',
+ 'CMP0067',
+ 'CMP0082',
+ 'CMP0089',
+ 'CMP0102',
+]
+
+target_type_map = {
+ 'STATIC_LIBRARY': 'static_library',
+ 'MODULE_LIBRARY': 'shared_module',
+ 'SHARED_LIBRARY': 'shared_library',
+ 'EXECUTABLE': 'executable',
+ 'OBJECT_LIBRARY': 'static_library',
+ 'INTERFACE_LIBRARY': 'header_only'
+}
+
+skip_targets = ['UTILITY']
+
+blacklist_compiler_flags = [
+ '-Wall', '-Wextra', '-Weverything', '-Werror', '-Wpedantic', '-pedantic', '-w',
+ '/W1', '/W2', '/W3', '/W4', '/Wall', '/WX', '/w',
+ '/O1', '/O2', '/Ob', '/Od', '/Og', '/Oi', '/Os', '/Ot', '/Ox', '/Oy', '/Ob0',
+ '/RTC1', '/RTCc', '/RTCs', '/RTCu',
+ '/Z7', '/Zi', '/ZI',
+]
+
+blacklist_link_flags = [
+ '/machine:x64', '/machine:x86', '/machine:arm', '/machine:ebc',
+ '/debug', '/debug:fastlink', '/debug:full', '/debug:none',
+ '/incremental',
+]
+
+blacklist_clang_cl_link_flags = ['/GR', '/EHsc', '/MDd', '/Zi', '/RTC1']
+
+blacklist_link_libs = [
+ 'kernel32.lib',
+ 'user32.lib',
+ 'gdi32.lib',
+ 'winspool.lib',
+ 'shell32.lib',
+ 'ole32.lib',
+ 'oleaut32.lib',
+ 'uuid.lib',
+ 'comdlg32.lib',
+ 'advapi32.lib'
+]
+
+transfer_dependencies_from = ['header_only']
+
+_cmake_name_regex = re.compile(r'[^_a-zA-Z0-9]')
+def _sanitize_cmake_name(name: str) -> str:
+ name = _cmake_name_regex.sub('_', name)
+ if name in FORBIDDEN_TARGET_NAMES or name.startswith('meson'):
+ name = 'cm_' + name
+ return name
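+
+# For illustration: 'my-lib++' is sanitized to 'my_lib__', and a reserved
+# name such as 'install' becomes 'cm_install'.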
+
+class OutputTargetMap:
+ rm_so_version = re.compile(r'(\.[0-9]+)+$')
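+    # strips trailing shared-library version suffixes,
+    # e.g. 'libfoo.so.1.2.3' -> 'libfoo.so'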
+
+ def __init__(self, build_dir: Path):
+ self.tgt_map = {} # type: T.Dict[str, T.Union['ConverterTarget', 'ConverterCustomTarget']]
+ self.build_dir = build_dir
+
+ def add(self, tgt: T.Union['ConverterTarget', 'ConverterCustomTarget']) -> None:
+ def assign_keys(keys: T.List[str]) -> None:
+ for i in [x for x in keys if x]:
+ self.tgt_map[i] = tgt
+ keys = [self._target_key(tgt.cmake_name)]
+ if isinstance(tgt, ConverterTarget):
+ keys += [tgt.full_name]
+ keys += [self._rel_artifact_key(x) for x in tgt.artifacts]
+ keys += [self._base_artifact_key(x) for x in tgt.artifacts]
+ if isinstance(tgt, ConverterCustomTarget):
+ keys += [self._rel_generated_file_key(x) for x in tgt.original_outputs]
+ keys += [self._base_generated_file_key(x) for x in tgt.original_outputs]
+ assign_keys(keys)
+
+ def _return_first_valid_key(self, keys: T.List[str]) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]:
+ for i in keys:
+ if i and i in self.tgt_map:
+ return self.tgt_map[i]
+ return None
+
+ def target(self, name: str) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]:
+ return self._return_first_valid_key([self._target_key(name)])
+
+ def executable(self, name: str) -> T.Optional['ConverterTarget']:
+ tgt = self.target(name)
+ if tgt is None or not isinstance(tgt, ConverterTarget):
+ return None
+ if tgt.meson_func() != 'executable':
+ return None
+ return tgt
+
+ def artifact(self, name: str) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]:
+ keys = []
+ candidates = [name, OutputTargetMap.rm_so_version.sub('', name)]
+ for i in lib_suffixes:
+ if not name.endswith('.' + i):
+ continue
+ new_name = name[:-len(i) - 1]
+ new_name = OutputTargetMap.rm_so_version.sub('', new_name)
+ candidates += [f'{new_name}.{i}']
+ for i in candidates:
+ keys += [self._rel_artifact_key(Path(i)), Path(i).name, self._base_artifact_key(Path(i))]
+ return self._return_first_valid_key(keys)
+
+ def generated(self, name: Path) -> T.Optional['ConverterCustomTarget']:
+ res = self._return_first_valid_key([self._rel_generated_file_key(name), self._base_generated_file_key(name)])
+ assert res is None or isinstance(res, ConverterCustomTarget)
+ return res
+
+ # Utility functions to generate local keys
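+    # (illustrative: a target 'foo' producing 'libfoo.so' ends up indexed
+    # under '__tgt_foo__', '__art_libfoo.so__' and, when the artifact lies
+    # inside the build dir, '__relart_<relative/path>__')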
+ def _rel_path(self, fname: Path) -> T.Optional[Path]:
+ try:
+ return fname.resolve().relative_to(self.build_dir)
+ except ValueError:
+ pass
+ return None
+
+ def _target_key(self, tgt_name: str) -> str:
+ return f'__tgt_{tgt_name}__'
+
+ def _rel_generated_file_key(self, fname: Path) -> T.Optional[str]:
+ path = self._rel_path(fname)
+ return f'__relgen_{path.as_posix()}__' if path else None
+
+ def _base_generated_file_key(self, fname: Path) -> str:
+ return f'__gen_{fname.name}__'
+
+ def _rel_artifact_key(self, fname: Path) -> T.Optional[str]:
+ path = self._rel_path(fname)
+ return f'__relart_{path.as_posix()}__' if path else None
+
+ def _base_artifact_key(self, fname: Path) -> str:
+ return f'__art_{fname.name}__'
+
+class ConverterTarget:
+ def __init__(self, target: CMakeTarget, env: 'Environment', for_machine: MachineChoice) -> None:
+ self.env = env
+ self.for_machine = for_machine
+ self.artifacts = target.artifacts
+ self.src_dir = target.src_dir
+ self.build_dir = target.build_dir
+ self.name = target.name
+ self.cmake_name = target.name
+ self.full_name = target.full_name
+ self.type = target.type
+ self.install = target.install
+ self.install_dir = None # type: T.Optional[Path]
+ self.link_libraries = target.link_libraries
+ self.link_flags = target.link_flags + target.link_lang_flags
+ self.depends_raw = [] # type: T.List[str]
+ self.depends = [] # type: T.List[T.Union[ConverterTarget, ConverterCustomTarget]]
+
+ if target.install_paths:
+ self.install_dir = target.install_paths[0]
+
+ self.languages = set() # type: T.Set[str]
+ self.sources = [] # type: T.List[Path]
+ self.generated = [] # type: T.List[Path]
+ self.generated_ctgt = [] # type: T.List[CustomTargetReference]
+ self.includes = [] # type: T.List[Path]
+ self.sys_includes = [] # type: T.List[Path]
+ self.link_with = [] # type: T.List[T.Union[ConverterTarget, ConverterCustomTarget]]
+ self.object_libs = [] # type: T.List[ConverterTarget]
+ self.compile_opts = {} # type: T.Dict[str, T.List[str]]
+ self.public_compile_opts = [] # type: T.List[str]
+ self.pie = False
+
+ # Project default override options (c_std, cpp_std, etc.)
+ self.override_options = [] # type: T.List[str]
+
+ # Convert the target name to a valid meson target name
+ self.name = _sanitize_cmake_name(self.name)
+
+ self.generated_raw = [] # type: T.List[Path]
+
+ for i in target.files:
+ languages = set() # type: T.Set[str]
+ src_suffixes = set() # type: T.Set[str]
+
+ # Insert suffixes
+ for j in i.sources:
+ if not j.suffix:
+ continue
+ src_suffixes.add(j.suffix[1:])
+
+ # Determine the meson language(s)
+ # Extract the default language from the explicit CMake field
+ lang_cmake_to_meson = {val.lower(): key for key, val in language_map.items()}
+ languages.add(lang_cmake_to_meson.get(i.language.lower(), 'c'))
+
+ # Determine missing languages from the source suffixes
+ for sfx in src_suffixes:
+ for key, val in lang_suffixes.items():
+ if sfx in val:
+ languages.add(key)
+ break
+
+ # Register the new languages and initialize the compile opts array
+ for lang in languages:
+ self.languages.add(lang)
+ if lang not in self.compile_opts:
+ self.compile_opts[lang] = []
+
+ # Add arguments, but avoid duplicates
+ args = i.flags
+ args += [f'-D{x}' for x in i.defines]
+ for lang in languages:
+ self.compile_opts[lang] += [x for x in args if x not in self.compile_opts[lang]]
+
+ # Handle include directories
+ self.includes += [x.path for x in i.includes if x.path not in self.includes and not x.isSystem]
+ self.sys_includes += [x.path for x in i.includes if x.path not in self.sys_includes and x.isSystem]
+
+ # Add sources to the right array
+ if i.is_generated:
+ self.generated_raw += i.sources
+ else:
+ self.sources += i.sources
+
+ def __repr__(self) -> str:
+ return f'<{self.__class__.__name__}: {self.name}>'
+
+ std_regex = re.compile(r'([-]{1,2}std=|/std:v?|[-]{1,2}std:)(.*)')
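+    # matches e.g. '-std=c++17', '--std=gnu99', and MSVC-style '/std:c++17'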
+
+ def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: Path, subdir: Path, install_prefix: Path, trace: CMakeTraceParser) -> None:
+ # Detect setting the C and C++ standard and do additional compiler args manipulation
+ for i in ['c', 'cpp']:
+ if i not in self.compile_opts:
+ continue
+
+ temp = []
+ for j in self.compile_opts[i]:
+ m = ConverterTarget.std_regex.match(j)
+ ctgt = output_target_map.generated(Path(j))
+ if m:
+ std = m.group(2)
+ supported = self._all_lang_stds(i)
+ if std not in supported:
+ mlog.warning(
+ 'Unknown {0}_std "{1}" -> Ignoring. Try setting the project-'
+ 'level {0}_std if build errors occur. Known '
+ '{0}_stds are: {2}'.format(i, std, ' '.join(supported)),
+ once=True
+ )
+ continue
+ self.override_options += [f'{i}_std={std}']
+ elif j in ['-fPIC', '-fpic', '-fPIE', '-fpie']:
+ self.pie = True
+ elif isinstance(ctgt, ConverterCustomTarget):
+ # Sometimes projects pass generated source files as compiler
+ # flags. Add these as generated sources to ensure that the
+                    # corresponding custom target is run.
+ self.generated_raw += [Path(j)]
+ temp += [j]
+ elif j in blacklist_compiler_flags:
+ pass
+ else:
+ temp += [j]
+
+ self.compile_opts[i] = temp
+
+ # Make sure to force enable -fPIC for OBJECT libraries
+ if self.type.upper() == 'OBJECT_LIBRARY':
+ self.pie = True
+
+ # Use the CMake trace, if required
+ tgt = trace.targets.get(self.cmake_name)
+ if tgt:
+ self.depends_raw = trace.targets[self.cmake_name].depends
+
+ # TODO refactor this copy paste from CMakeDependency for future releases
+ reg_is_lib = re.compile(r'^(-l[a-zA-Z0-9_]+|-l?pthread)$')
+ to_process = [self.cmake_name]
+ processed = []
+ while len(to_process) > 0:
+ curr = to_process.pop(0)
+
+ if curr in processed or curr not in trace.targets:
+ continue
+
+ tgt = trace.targets[curr]
+ cfgs = []
+ cfg = ''
+ otherDeps = []
+ libraries = []
+ mlog.debug(str(tgt))
+
+ if 'INTERFACE_INCLUDE_DIRECTORIES' in tgt.properties:
+ self.includes += [Path(x) for x in tgt.properties['INTERFACE_INCLUDE_DIRECTORIES'] if x]
+
+ if 'INTERFACE_LINK_OPTIONS' in tgt.properties:
+ self.link_flags += [x for x in tgt.properties['INTERFACE_LINK_OPTIONS'] if x]
+
+ if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properties:
+ self.public_compile_opts += ['-D' + re.sub('^-D', '', x) for x in tgt.properties['INTERFACE_COMPILE_DEFINITIONS'] if x]
+
+ if 'INTERFACE_COMPILE_OPTIONS' in tgt.properties:
+ self.public_compile_opts += [x for x in tgt.properties['INTERFACE_COMPILE_OPTIONS'] if x]
+
+ if 'IMPORTED_CONFIGURATIONS' in tgt.properties:
+ cfgs += [x for x in tgt.properties['IMPORTED_CONFIGURATIONS'] if x]
+ cfg = cfgs[0]
+
+ if 'CONFIGURATIONS' in tgt.properties:
+ cfgs += [x for x in tgt.properties['CONFIGURATIONS'] if x]
+ cfg = cfgs[0]
+
+                is_debug = self.env.coredata.get_option(OptionKey('debug'))
+ if is_debug:
+ if 'DEBUG' in cfgs:
+ cfg = 'DEBUG'
+ elif 'RELEASE' in cfgs:
+ cfg = 'RELEASE'
+ else:
+ if 'RELEASE' in cfgs:
+ cfg = 'RELEASE'
+
+ if f'IMPORTED_IMPLIB_{cfg}' in tgt.properties:
+ libraries += [x for x in tgt.properties[f'IMPORTED_IMPLIB_{cfg}'] if x]
+ elif 'IMPORTED_IMPLIB' in tgt.properties:
+ libraries += [x for x in tgt.properties['IMPORTED_IMPLIB'] if x]
+ elif f'IMPORTED_LOCATION_{cfg}' in tgt.properties:
+ libraries += [x for x in tgt.properties[f'IMPORTED_LOCATION_{cfg}'] if x]
+ elif 'IMPORTED_LOCATION' in tgt.properties:
+ libraries += [x for x in tgt.properties['IMPORTED_LOCATION'] if x]
+
+ if 'LINK_LIBRARIES' in tgt.properties:
+ otherDeps += [x for x in tgt.properties['LINK_LIBRARIES'] if x]
+
+ if 'INTERFACE_LINK_LIBRARIES' in tgt.properties:
+ otherDeps += [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x]
+
+ if f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}' in tgt.properties:
+ otherDeps += [x for x in tgt.properties[f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}'] if x]
+ elif 'IMPORTED_LINK_DEPENDENT_LIBRARIES' in tgt.properties:
+ otherDeps += [x for x in tgt.properties['IMPORTED_LINK_DEPENDENT_LIBRARIES'] if x]
+
+ for j in otherDeps:
+ if j in trace.targets:
+ to_process += [j]
+ elif reg_is_lib.match(j) or Path(j).exists():
+ libraries += [j]
+
+ for j in libraries:
+ if j not in self.link_libraries:
+ self.link_libraries += [j]
+
+ processed += [curr]
+ elif self.type.upper() not in ['EXECUTABLE', 'OBJECT_LIBRARY']:
+ mlog.warning('CMake: Target', mlog.bold(self.cmake_name), 'not found in CMake trace. This can lead to build errors')
+
+ temp = []
+ for i in self.link_libraries:
+ # Let meson handle this arcane magic
+ if ',-rpath,' in i:
+ continue
+ if not Path(i).is_absolute():
+ link_with = output_target_map.artifact(i)
+ if link_with:
+ self.link_with += [link_with]
+ continue
+
+ temp += [i]
+ self.link_libraries = temp
+
+ # Filter out files that are not supported by the language
+ supported = list(assembler_suffixes) + list(header_suffixes) + list(obj_suffixes)
+ for i in self.languages:
+ supported += list(lang_suffixes[i])
+ supported = [f'.{x}' for x in supported]
+ self.sources = [x for x in self.sources if any([x.name.endswith(y) for y in supported])]
+ self.generated_raw = [x for x in self.generated_raw if any([x.name.endswith(y) for y in supported])]
+
+ # Make paths relative
+ def rel_path(x: Path, is_header: bool, is_generated: bool) -> T.Optional[Path]:
+ if not x.is_absolute():
+ x = self.src_dir / x
+ x = x.resolve()
+ assert x.is_absolute()
+ if not x.exists() and not any([x.name.endswith(y) for y in obj_suffixes]) and not is_generated:
+ if path_is_in_root(x, Path(self.env.get_build_dir()), resolve=True):
+ x.mkdir(parents=True, exist_ok=True)
+ return x.relative_to(Path(self.env.get_build_dir()) / subdir)
+ else:
+ mlog.warning('CMake: path', mlog.bold(x.as_posix()), 'does not exist.')
+ mlog.warning(' --> Ignoring. This can lead to build errors.')
+ return None
+ if x in trace.explicit_headers:
+ return None
+ if (
+ path_is_in_root(x, Path(self.env.get_source_dir()))
+ and not (
+ path_is_in_root(x, root_src_dir) or
+ path_is_in_root(x, Path(self.env.get_build_dir()))
+ )
+ ):
+ mlog.warning('CMake: path', mlog.bold(x.as_posix()), 'is inside the root project but', mlog.bold('not'), 'inside the subproject.')
+ mlog.warning(' --> Ignoring. This can lead to build errors.')
+ return None
+ if path_is_in_root(x, Path(self.env.get_build_dir())) and is_header:
+ return x.relative_to(Path(self.env.get_build_dir()) / subdir)
+ if path_is_in_root(x, root_src_dir):
+ return x.relative_to(root_src_dir)
+ return x
+
+ build_dir_rel = self.build_dir.relative_to(Path(self.env.get_build_dir()) / subdir)
+ self.generated_raw = [rel_path(x, False, True) for x in self.generated_raw]
+ self.includes = list(OrderedSet([rel_path(x, True, False) for x in OrderedSet(self.includes)] + [build_dir_rel]))
+ self.sys_includes = list(OrderedSet([rel_path(x, True, False) for x in OrderedSet(self.sys_includes)]))
+ self.sources = [rel_path(x, False, False) for x in self.sources]
+
+ # Resolve custom targets
+ for gen_file in self.generated_raw:
+ ctgt = output_target_map.generated(gen_file)
+ if ctgt:
+ assert isinstance(ctgt, ConverterCustomTarget)
+ ref = ctgt.get_ref(gen_file)
+ assert isinstance(ref, CustomTargetReference) and ref.valid()
+ self.generated_ctgt += [ref]
+ elif gen_file is not None:
+ self.generated += [gen_file]
+
+ # Remove delete entries
+ self.includes = [x for x in self.includes if x is not None]
+ self.sys_includes = [x for x in self.sys_includes if x is not None]
+ self.sources = [x for x in self.sources if x is not None]
+
+ # Make sure '.' is always in the include directories
+ if Path('.') not in self.includes:
+ self.includes += [Path('.')]
+
+ # make install dir relative to the install prefix
+ if self.install_dir and self.install_dir.is_absolute():
+ if path_is_in_root(self.install_dir, install_prefix):
+ self.install_dir = self.install_dir.relative_to(install_prefix)
+
+ # Remove blacklisted options and libs
+ def check_flag(flag: str) -> bool:
+ if flag.lower() in blacklist_link_flags or flag in blacklist_compiler_flags + blacklist_clang_cl_link_flags:
+ return False
+ if flag.startswith('/D'):
+ return False
+ return True
+
+ self.link_libraries = [x for x in self.link_libraries if x.lower() not in blacklist_link_libs]
+ self.link_flags = [x for x in self.link_flags if check_flag(x)]
+
+ # Handle OSX frameworks
+ def handle_frameworks(flags: T.List[str]) -> T.List[str]:
+ res: T.List[str] = []
+ for i in flags:
+ p = Path(i)
+ if not p.exists() or not p.name.endswith('.framework'):
+ res += [i]
+ continue
+ res += ['-framework', p.stem]
+ return res
+
+ self.link_libraries = handle_frameworks(self.link_libraries)
+ self.link_flags = handle_frameworks(self.link_flags)
+
+ # Handle explicit CMake add_dependency() calls
+ for i in self.depends_raw:
+ dep_tgt = output_target_map.target(i)
+ if dep_tgt:
+ self.depends.append(dep_tgt)
+
+ def process_object_libs(self, obj_target_list: T.List['ConverterTarget'], linker_workaround: bool) -> None:
+ # Try to detect the object library(s) from the generated input sources
+ temp = [x for x in self.generated if any([x.name.endswith('.' + y) for y in obj_suffixes])]
+ stem = [x.stem for x in temp]
+ exts = self._all_source_suffixes()
+        # `stem` now stores the source filenames of the object files
+ for i in obj_target_list:
+ source_files = [x.name for x in i.sources + i.generated]
+ for j in stem:
+ # On some platforms (specifically looking at you Windows with vs20xy backend) CMake does
+                # not produce object files with the format `foo.cpp.obj`, instead it skips the language
+ # suffix and just produces object files like `foo.obj`. Thus we have to do our best to
+ # undo this step and guess the correct language suffix of the object file. This is done
+ # by trying all language suffixes meson knows and checking if one of them fits.
+ candidates = [j] # type: T.List[str]
+ if not any([j.endswith('.' + x) for x in exts]):
+ mlog.warning('Object files do not contain source file extensions, thus falling back to guessing them.', once=True)
+ candidates += [f'{j}.{x}' for x in exts]
+ if any([x in source_files for x in candidates]):
+ if linker_workaround:
+ self._append_objlib_sources(i)
+ else:
+ self.includes += i.includes
+ self.includes = list(OrderedSet(self.includes))
+ self.object_libs += [i]
+ break
+
+ # Filter out object files from the sources
+ self.generated = [x for x in self.generated if not any([x.name.endswith('.' + y) for y in obj_suffixes])]
+
+ def _append_objlib_sources(self, tgt: 'ConverterTarget') -> None:
+ self.includes += tgt.includes
+ self.sources += tgt.sources
+ self.generated += tgt.generated
+ self.generated_ctgt += tgt.generated_ctgt
+ self.includes = list(OrderedSet(self.includes))
+ self.sources = list(OrderedSet(self.sources))
+ self.generated = list(OrderedSet(self.generated))
+ self.generated_ctgt = list(OrderedSet(self.generated_ctgt))
+
+ # Inherit compiler arguments since they may be required for building
+ for lang, opts in tgt.compile_opts.items():
+ if lang not in self.compile_opts:
+ self.compile_opts[lang] = []
+ self.compile_opts[lang] += [x for x in opts if x not in self.compile_opts[lang]]
+
+ @lru_cache(maxsize=None)
+ def _all_source_suffixes(self) -> 'ImmutableListProtocol[str]':
+ suffixes = [] # type: T.List[str]
+ for exts in lang_suffixes.values():
+ suffixes += [x for x in exts]
+ return suffixes
+
+ @lru_cache(maxsize=None)
+ def _all_lang_stds(self, lang: str) -> 'ImmutableListProtocol[str]':
+ try:
+ res = self.env.coredata.options[OptionKey('std', machine=MachineChoice.BUILD, lang=lang)].choices
+ except KeyError:
+ return []
+
+ # TODO: Get rid of this once we have proper typing for options
+ assert isinstance(res, list)
+ for i in res:
+ assert isinstance(i, str)
+
+ return res
+
+ def process_inter_target_dependencies(self) -> None:
+        # Move the dependencies from all targets whose type is listed in
+        # transfer_dependencies_from to this target
+ to_process = list(self.depends)
+ processed = []
+ new_deps = []
+ for i in to_process:
+ processed += [i]
+ if isinstance(i, ConverterTarget) and i.meson_func() in transfer_dependencies_from:
+ to_process += [x for x in i.depends if x not in processed]
+ else:
+ new_deps += [i]
+ self.depends = list(OrderedSet(new_deps))
+
+ def cleanup_dependencies(self) -> None:
+        # Clear the dependencies of targets whose dependencies were moved
+        # to their dependents in process_inter_target_dependencies()
+ if self.meson_func() in transfer_dependencies_from:
+ self.depends = []
+
+ def meson_func(self) -> str:
+ return target_type_map.get(self.type.upper())
+
+ def log(self) -> None:
+ mlog.log('Target', mlog.bold(self.name), f'({self.cmake_name})')
+ mlog.log(' -- artifacts: ', mlog.bold(str(self.artifacts)))
+ mlog.log(' -- full_name: ', mlog.bold(self.full_name))
+ mlog.log(' -- type: ', mlog.bold(self.type))
+ mlog.log(' -- install: ', mlog.bold('true' if self.install else 'false'))
+ mlog.log(' -- install_dir: ', mlog.bold(self.install_dir.as_posix() if self.install_dir else ''))
+ mlog.log(' -- link_libraries: ', mlog.bold(str(self.link_libraries)))
+ mlog.log(' -- link_with: ', mlog.bold(str(self.link_with)))
+ mlog.log(' -- object_libs: ', mlog.bold(str(self.object_libs)))
+ mlog.log(' -- link_flags: ', mlog.bold(str(self.link_flags)))
+ mlog.log(' -- languages: ', mlog.bold(str(self.languages)))
+ mlog.log(' -- includes: ', mlog.bold(str(self.includes)))
+ mlog.log(' -- sys_includes: ', mlog.bold(str(self.sys_includes)))
+ mlog.log(' -- sources: ', mlog.bold(str(self.sources)))
+ mlog.log(' -- generated: ', mlog.bold(str(self.generated)))
+ mlog.log(' -- generated_ctgt: ', mlog.bold(str(self.generated_ctgt)))
+ mlog.log(' -- pie: ', mlog.bold('true' if self.pie else 'false'))
+ mlog.log(' -- override_opts: ', mlog.bold(str(self.override_options)))
+ mlog.log(' -- depends: ', mlog.bold(str(self.depends)))
+ mlog.log(' -- options:')
+ for key, val in self.compile_opts.items():
+ mlog.log(' -', key, '=', mlog.bold(str(val)))
+
+class CustomTargetReference:
+ def __init__(self, ctgt: 'ConverterCustomTarget', index: int) -> None:
+ self.ctgt = ctgt # type: ConverterCustomTarget
+ self.index = index # type: int
+
+ def __repr__(self) -> str:
+ if self.valid():
+ return '<{}: {} [{}]>'.format(self.__class__.__name__, self.ctgt.name, self.ctgt.outputs[self.index])
+ else:
+ return f'<{self.__class__.__name__}: INVALID REFERENCE>'
+
+ def valid(self) -> bool:
+ return self.ctgt is not None and self.index >= 0
+
+ def filename(self) -> str:
+ return self.ctgt.outputs[self.index]
+
+class ConverterCustomTarget:
+ tgt_counter = 0 # type: int
+ out_counter = 0 # type: int
+
+ def __init__(self, target: CMakeGeneratorTarget, env: 'Environment', for_machine: MachineChoice) -> None:
+ assert target.current_bin_dir is not None
+ assert target.current_src_dir is not None
+ self.name = target.name
+ if not self.name:
+ self.name = f'custom_tgt_{ConverterCustomTarget.tgt_counter}'
+ ConverterCustomTarget.tgt_counter += 1
+ self.cmake_name = str(self.name)
+ self.original_outputs = list(target.outputs)
+ self.outputs = [x.name for x in self.original_outputs]
+ self.conflict_map = {} # type: T.Dict[str, str]
+ self.command = [] # type: T.List[T.List[T.Union[str, ConverterTarget]]]
+ self.working_dir = target.working_dir
+ self.depends_raw = target.depends
+ self.inputs = [] # type: T.List[T.Union[str, CustomTargetReference]]
+ self.depends = [] # type: T.List[T.Union[ConverterTarget, ConverterCustomTarget]]
+ self.current_bin_dir = target.current_bin_dir # type: Path
+ self.current_src_dir = target.current_src_dir # type: Path
+ self.env = env
+ self.for_machine = for_machine
+ self._raw_target = target
+
+ # Convert the target name to a valid meson target name
+ self.name = _sanitize_cmake_name(self.name)
+
+ def __repr__(self) -> str:
+ return f'<{self.__class__.__name__}: {self.name} {self.outputs}>'
+
+ def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: Path, all_outputs: T.List[str], trace: CMakeTraceParser) -> None:
+ # Default the working directory to ${CMAKE_CURRENT_BINARY_DIR}
+ if self.working_dir is None:
+ self.working_dir = self.current_bin_dir
+
+        # A relative working directory is always relative
+        # to ${CMAKE_CURRENT_BINARY_DIR}
+ if not self.working_dir.is_absolute():
+ self.working_dir = self.current_bin_dir / self.working_dir
+
+ # Modify the original outputs if they are relative. Again,
+ # relative paths are relative to ${CMAKE_CURRENT_BINARY_DIR}
+ def ensure_absolute(x: Path) -> Path:
+ if x.is_absolute():
+ return x
+ else:
+ return self.current_bin_dir / x
+ self.original_outputs = [ensure_absolute(x) for x in self.original_outputs]
+
+ # Ensure that there is no duplicate output in the project so
+ # that meson can handle cases where the same filename is
+ # generated in multiple directories
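+        # (e.g. if two custom targets both generate 'version.h', the second
+        # occurrence is renamed to something like 'c0_version.h' and the
+        # rename is recorded in self.conflict_map)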
+ temp_outputs = [] # type: T.List[str]
+ for i in self.outputs:
+ if i in all_outputs:
+ old = str(i)
+ i = f'c{ConverterCustomTarget.out_counter}_{i}'
+ ConverterCustomTarget.out_counter += 1
+ self.conflict_map[old] = i
+ all_outputs += [i]
+ temp_outputs += [i]
+ self.outputs = temp_outputs
+
+ # Check if the command is a build target
+ commands = [] # type: T.List[T.List[T.Union[str, ConverterTarget]]]
+ for curr_cmd in self._raw_target.command:
+            assert isinstance(curr_cmd, list)
+ cmd = [] # type: T.List[T.Union[str, ConverterTarget]]
+
+ for j in curr_cmd:
+ if not j:
+ continue
+ target = output_target_map.executable(j)
+ if target:
+ # When cross compiling, binaries have to be executed with an exe_wrapper (for instance wine for mingw-w64)
+ if self.env.exe_wrapper is not None and self.env.properties[self.for_machine].get_cmake_use_exe_wrapper():
+ assert isinstance(self.env.exe_wrapper, ExternalProgram)
+ cmd += self.env.exe_wrapper.get_command()
+ cmd += [target]
+ continue
+ elif j in trace.targets:
+ trace_tgt = trace.targets[j]
+ if trace_tgt.type == 'EXECUTABLE' and 'IMPORTED_LOCATION' in trace_tgt.properties:
+ cmd += trace_tgt.properties['IMPORTED_LOCATION']
+ continue
+ mlog.debug(f'CMake: Found invalid CMake target "{j}" --> ignoring \n{trace_tgt}')
+
+ # Fallthrough on error
+ cmd += [j]
+
+ commands += [cmd]
+ self.command = commands
+
+        # If the custom target does not declare any output, create a dummy
+        # one that can be used as a dependency.
+ if not self.outputs:
+ self.outputs = [self.name + '.h']
+
+ # Check dependencies and input files
+ for i in self.depends_raw:
+ if not i:
+ continue
+ raw = Path(i)
+ art = output_target_map.artifact(i)
+ tgt = output_target_map.target(i)
+ gen = output_target_map.generated(raw)
+
+ rel_to_root = None
+ try:
+ rel_to_root = raw.relative_to(root_src_dir)
+ except ValueError:
+ rel_to_root = None
+
+ # First check for existing files. Only then check for existing
+ # targets, etc. This reduces the chance of misdetecting input files
+ # as outputs from other targets.
+ # See https://github.com/mesonbuild/meson/issues/6632
+ if not raw.is_absolute() and (self.current_src_dir / raw).exists():
+ self.inputs += [(self.current_src_dir / raw).relative_to(root_src_dir).as_posix()]
+ elif raw.is_absolute() and raw.exists() and rel_to_root is not None:
+ self.inputs += [rel_to_root.as_posix()]
+ elif art:
+ self.depends += [art]
+ elif tgt:
+ self.depends += [tgt]
+ elif gen:
+ ctgt_ref = gen.get_ref(raw)
+ assert ctgt_ref is not None
+ self.inputs += [ctgt_ref]
+
+ def process_inter_target_dependencies(self) -> None:
+        # Move the dependencies from all targets whose type is listed in
+        # transfer_dependencies_from to this target
+ to_process = list(self.depends)
+ processed = []
+ new_deps = []
+ for i in to_process:
+ processed += [i]
+ if isinstance(i, ConverterTarget) and i.meson_func() in transfer_dependencies_from:
+ to_process += [x for x in i.depends if x not in processed]
+ else:
+ new_deps += [i]
+ self.depends = list(OrderedSet(new_deps))
+
+ def get_ref(self, fname: Path) -> T.Optional[CustomTargetReference]:
+ name = fname.name
+ try:
+ if name in self.conflict_map:
+ name = self.conflict_map[name]
+ idx = self.outputs.index(name)
+ return CustomTargetReference(self, idx)
+ except ValueError:
+ return None
+
+ def log(self) -> None:
+ mlog.log('Custom Target', mlog.bold(self.name), f'({self.cmake_name})')
+ mlog.log(' -- command: ', mlog.bold(str(self.command)))
+ mlog.log(' -- outputs: ', mlog.bold(str(self.outputs)))
+ mlog.log(' -- conflict_map: ', mlog.bold(str(self.conflict_map)))
+ mlog.log(' -- working_dir: ', mlog.bold(str(self.working_dir)))
+ mlog.log(' -- depends_raw: ', mlog.bold(str(self.depends_raw)))
+ mlog.log(' -- inputs: ', mlog.bold(str(self.inputs)))
+ mlog.log(' -- depends: ', mlog.bold(str(self.depends)))
+
+class CMakeAPI(Enum):
+ SERVER = 1
+ FILE = 2
+
+class CMakeInterpreter:
+ def __init__(self, build: 'Build', subdir: Path, src_dir: Path, install_prefix: Path, env: 'Environment', backend: 'Backend'):
+ self.build = build
+ self.subdir = subdir
+ self.src_dir = src_dir
+ self.build_dir_rel = subdir / '__CMake_build'
+ self.build_dir = Path(env.get_build_dir()) / self.build_dir_rel
+ self.install_prefix = install_prefix
+ self.env = env
+ self.for_machine = MachineChoice.HOST # TODO make parameter
+ self.backend_name = backend.name
+ self.linkers = set() # type: T.Set[str]
+ self.cmake_api = CMakeAPI.SERVER
+ self.client = CMakeClient(self.env)
+ self.fileapi = CMakeFileAPI(self.build_dir)
+
+ # Raw CMake results
+ self.bs_files = [] # type: T.List[Path]
+ self.codemodel_configs = None # type: T.Optional[T.List[CMakeConfiguration]]
+ self.raw_trace = None # type: T.Optional[str]
+
+ # Analysed data
+ self.project_name = ''
+ self.languages = [] # type: T.List[str]
+ self.targets = [] # type: T.List[ConverterTarget]
+ self.custom_targets = [] # type: T.List[ConverterCustomTarget]
+ self.trace = CMakeTraceParser('', Path('.')) # Will be replaced in analyse
+ self.output_target_map = OutputTargetMap(self.build_dir)
+
+ # Generated meson data
+ self.generated_targets = {} # type: T.Dict[str, T.Dict[str, T.Optional[str]]]
+ self.internal_name_map = {} # type: T.Dict[str, str]
+
+ # Do some special handling for object libraries for certain configurations
+ self._object_lib_workaround = False
+ if self.backend_name.startswith('vs'):
+ for comp in self.env.coredata.compilers[self.for_machine].values():
+ if comp.get_linker_id() == 'link':
+ self._object_lib_workaround = True
+ break
+
+ def configure(self, extra_cmake_options: T.List[str]) -> CMakeExecutor:
+ # Find CMake
+ # TODO: Using MachineChoice.BUILD should always be correct here, but also evaluate the use of self.for_machine
+ cmake_exe = CMakeExecutor(self.env, '>=3.7', MachineChoice.BUILD)
+ if not cmake_exe.found():
+ raise CMakeException('Unable to find CMake')
+ self.trace = CMakeTraceParser(cmake_exe.version(), self.build_dir, permissive=True)
+
+ preload_file = mesondata['cmake/data/preload.cmake'].write_to_private(self.env)
+ toolchain = CMakeToolchain(cmake_exe, self.env, self.for_machine, CMakeExecScope.SUBPROJECT, self.build_dir, preload_file)
+ toolchain_file = toolchain.write()
+
+ # TODO: drop this check once the deprecated `cmake_args` kwarg is removed
+ extra_cmake_options = check_cmake_args(extra_cmake_options)
+
+ cmake_args = []
+ cmake_args += cmake_get_generator_args(self.env)
+ cmake_args += [f'-DCMAKE_INSTALL_PREFIX={self.install_prefix}']
+ cmake_args += extra_cmake_options
+ trace_args = self.trace.trace_args()
+ cmcmp_args = [f'-DCMAKE_POLICY_WARNING_{x}=OFF' for x in disable_policy_warnings]
+
+ if version_compare(cmake_exe.version(), '>=3.14'):
+ self.cmake_api = CMakeAPI.FILE
+ self.fileapi.setup_request()
+
+ # Run CMake
+ mlog.log()
+ with mlog.nested():
+ mlog.log('Configuring the build directory with', mlog.bold('CMake'), 'version', mlog.cyan(cmake_exe.version()))
+ mlog.log(mlog.bold('Running CMake with:'), ' '.join(cmake_args))
+ mlog.log(mlog.bold(' - build directory: '), self.build_dir.as_posix())
+ mlog.log(mlog.bold(' - source directory: '), self.src_dir.as_posix())
+ mlog.log(mlog.bold(' - toolchain file: '), toolchain_file.as_posix())
+ mlog.log(mlog.bold(' - preload file: '), preload_file.as_posix())
+ mlog.log(mlog.bold(' - trace args: '), ' '.join(trace_args))
+ mlog.log(mlog.bold(' - disabled policy warnings:'), '[{}]'.format(', '.join(disable_policy_warnings)))
+ mlog.log()
+ self.build_dir.mkdir(parents=True, exist_ok=True)
+ os_env = environ.copy()
+ os_env['LC_ALL'] = 'C'
+ final_args = cmake_args + trace_args + cmcmp_args + toolchain.get_cmake_args() + [self.src_dir.as_posix()]
+
+ cmake_exe.set_exec_mode(print_cmout=True, always_capture_stderr=self.trace.requires_stderr())
+ rc, _, self.raw_trace = cmake_exe.call(final_args, self.build_dir, env=os_env, disable_cache=True)
+
+ mlog.log()
+ h = mlog.green('SUCCEEDED') if rc == 0 else mlog.red('FAILED')
+ mlog.log('CMake configuration:', h)
+ if rc != 0:
+ raise CMakeException('Failed to configure the CMake subproject')
+
+ return cmake_exe
+
+ def initialise(self, extra_cmake_options: T.List[str]) -> None:
+        # Run configure the old way because doing it
+        # with the server doesn't work for some reason.
+        # Additionally, the file API requires a configure anyway.
+ cmake_exe = self.configure(extra_cmake_options)
+
+        # Continue with the file API if supported
+ if self.cmake_api is CMakeAPI.FILE:
+ # Parse the result
+ self.fileapi.load_reply()
+
+ # Load the buildsystem file list
+ cmake_files = self.fileapi.get_cmake_sources()
+ self.bs_files = [x.file for x in cmake_files if not x.is_cmake and not x.is_temp]
+ self.bs_files = [relative_to_if_possible(x, Path(self.env.get_source_dir())) for x in self.bs_files]
+ self.bs_files = [x for x in self.bs_files if not path_is_in_root(x, Path(self.env.get_build_dir()), resolve=True)]
+ self.bs_files = list(OrderedSet(self.bs_files))
+
+ # Load the codemodel configurations
+ self.codemodel_configs = self.fileapi.get_cmake_configurations()
+ return
+
+ with self.client.connect(cmake_exe):
+ generator = backend_generator_map[self.backend_name]
+ self.client.do_handshake(self.src_dir, self.build_dir, generator, 1)
+
+ # Do a second configure to initialise the server
+ self.client.query_checked(RequestConfigure(), 'CMake server configure')
+
+ # Generate the build system files
+ self.client.query_checked(RequestCompute(), 'Generating build system files')
+
+ # Get CMake build system files
+ bs_reply = self.client.query_checked(RequestCMakeInputs(), 'Querying build system files')
+ assert isinstance(bs_reply, ReplyCMakeInputs)
+
+ # Now get the CMake code model
+ cm_reply = self.client.query_checked(RequestCodeModel(), 'Querying the CMake code model')
+ assert isinstance(cm_reply, ReplyCodeModel)
+
+ src_dir = bs_reply.src_dir
+ self.bs_files = [x.file for x in bs_reply.build_files if not x.is_cmake and not x.is_temp]
+ self.bs_files = [relative_to_if_possible(src_dir / x, Path(self.env.get_source_dir()), resolve=True) for x in self.bs_files]
+ self.bs_files = [x for x in self.bs_files if not path_is_in_root(x, Path(self.env.get_build_dir()), resolve=True)]
+ self.bs_files = list(OrderedSet(self.bs_files))
+ self.codemodel_configs = cm_reply.configs
+
+ def analyse(self) -> None:
+ if self.codemodel_configs is None:
+ raise CMakeException('CMakeInterpreter was not initialized')
+
+ # Clear analyser data
+ self.project_name = ''
+ self.languages = []
+ self.targets = []
+ self.custom_targets = []
+
+ # Parse the trace
+ self.trace.parse(self.raw_trace)
+
+ # Find all targets
+ added_target_names = [] # type: T.List[str]
+ for i_0 in self.codemodel_configs:
+ for j_0 in i_0.projects:
+ if not self.project_name:
+ self.project_name = j_0.name
+ for k_0 in j_0.targets:
+ # Avoid duplicate targets from different configurations and known
+ # dummy CMake internal target types
+ if k_0.type not in skip_targets and k_0.name not in added_target_names:
+ added_target_names += [k_0.name]
+ self.targets += [ConverterTarget(k_0, self.env, self.for_machine)]
+
+ # Add interface targets from trace, if not already present.
+ # This step is required because interface targets were removed from
+ # the CMake file API output.
+ api_target_name_list = [x.name for x in self.targets]
+ for i_1 in self.trace.targets.values():
+ if i_1.type != 'INTERFACE' or i_1.name in api_target_name_list or i_1.imported:
+ continue
+ dummy = CMakeTarget({
+ 'name': i_1.name,
+ 'type': 'INTERFACE_LIBRARY',
+ 'sourceDirectory': self.src_dir,
+ 'buildDirectory': self.build_dir,
+ })
+ self.targets += [ConverterTarget(dummy, self.env, self.for_machine)]
+
+ for i_2 in self.trace.custom_targets:
+ self.custom_targets += [ConverterCustomTarget(i_2, self.env, self.for_machine)]
+
+ # generate the output_target_map
+ for i_3 in [*self.targets, *self.custom_targets]:
+ assert isinstance(i_3, (ConverterTarget, ConverterCustomTarget))
+ self.output_target_map.add(i_3)
+
+ # First pass: Basic target cleanup
+ object_libs = []
+ custom_target_outputs = [] # type: T.List[str]
+ for ctgt in self.custom_targets:
+ ctgt.postprocess(self.output_target_map, self.src_dir, custom_target_outputs, self.trace)
+ for tgt in self.targets:
+ tgt.postprocess(self.output_target_map, self.src_dir, self.subdir, self.install_prefix, self.trace)
+ if tgt.type == 'OBJECT_LIBRARY':
+ object_libs += [tgt]
+ self.languages += [x for x in tgt.languages if x not in self.languages]
+
+ # Second pass: Detect object library dependencies
+ for tgt in self.targets:
+ tgt.process_object_libs(object_libs, self._object_lib_workaround)
+
+ # Third pass: Reassign dependencies to avoid some loops
+ for tgt in self.targets:
+ tgt.process_inter_target_dependencies()
+ for ctgt in self.custom_targets:
+ ctgt.process_inter_target_dependencies()
+
+        # Fourth pass: Remove reassigned dependencies
+ for tgt in self.targets:
+ tgt.cleanup_dependencies()
+
+ mlog.log('CMake project', mlog.bold(self.project_name), 'has', mlog.bold(str(len(self.targets) + len(self.custom_targets))), 'build targets.')
+
+ def pretend_to_be_meson(self, options: TargetOptions) -> CodeBlockNode:
+ if not self.project_name:
+ raise CMakeException('CMakeInterpreter was not analysed')
+
+ def token(tid: str = 'string', val: TYPE_mixed = '') -> Token:
+ return Token(tid, self.subdir.as_posix(), 0, 0, 0, None, val)
+
+ def string(value: str) -> StringNode:
+ return StringNode(token(val=value))
+
+ def id_node(value: str) -> IdNode:
+ return IdNode(token(val=value))
+
+ def number(value: int) -> NumberNode:
+ return NumberNode(token(val=value))
+
+ def nodeify(value: TYPE_mixed_list) -> BaseNode:
+ if isinstance(value, str):
+ return string(value)
+ if isinstance(value, Path):
+ return string(value.as_posix())
+ elif isinstance(value, bool):
+ return BooleanNode(token(val=value))
+ elif isinstance(value, int):
+ return number(value)
+ elif isinstance(value, list):
+ return array(value)
+ elif isinstance(value, BaseNode):
+ return value
+ raise RuntimeError('invalid type of value: {} ({})'.format(type(value).__name__, str(value)))
+
+ def indexed(node: BaseNode, index: int) -> IndexNode:
+ return IndexNode(node, nodeify(index))
+
+ def array(elements: TYPE_mixed_list) -> ArrayNode:
+ args = ArgumentNode(token())
+ if not isinstance(elements, list):
+                elements = [elements]
+ args.arguments += [nodeify(x) for x in elements if x is not None]
+ return ArrayNode(args, 0, 0, 0, 0)
+
+ def function(name: str, args: T.Optional[TYPE_mixed_list] = None, kwargs: T.Optional[TYPE_mixed_kwargs] = None) -> FunctionNode:
+ args = [] if args is None else args
+ kwargs = {} if kwargs is None else kwargs
+ args_n = ArgumentNode(token())
+ if not isinstance(args, list):
+ assert isinstance(args, (str, int, bool, Path, BaseNode))
+ args = [args]
+ args_n.arguments = [nodeify(x) for x in args if x is not None]
+ args_n.kwargs = {id_node(k): nodeify(v) for k, v in kwargs.items() if v is not None}
+ func_n = FunctionNode(self.subdir.as_posix(), 0, 0, 0, 0, name, args_n)
+ return func_n
+
+ def method(obj: BaseNode, name: str, args: T.Optional[TYPE_mixed_list] = None, kwargs: T.Optional[TYPE_mixed_kwargs] = None) -> MethodNode:
+ args = [] if args is None else args
+ kwargs = {} if kwargs is None else kwargs
+ args_n = ArgumentNode(token())
+ if not isinstance(args, list):
+ assert isinstance(args, (str, int, bool, Path, BaseNode))
+ args = [args]
+ args_n.arguments = [nodeify(x) for x in args if x is not None]
+ args_n.kwargs = {id_node(k): nodeify(v) for k, v in kwargs.items() if v is not None}
+ return MethodNode(self.subdir.as_posix(), 0, 0, obj, name, args_n)
+
+ def assign(var_name: str, value: BaseNode) -> AssignmentNode:
+ return AssignmentNode(self.subdir.as_posix(), 0, 0, var_name, value)
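+
+        # Illustrative: these helpers emit Meson AST directly, so e.g.
+        #   assign('foo_inc', function('include_directories', ['.']))
+        # corresponds to the Meson line `foo_inc = include_directories('.')`.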
+
+ # Generate the root code block and the project function call
+ root_cb = CodeBlockNode(token())
+ root_cb.lines += [function('project', [self.project_name] + self.languages)]
+
+ # Add the run script for custom commands
+
+ # Add the targets
+ processing = [] # type: T.List[str]
+ processed = {} # type: T.Dict[str, T.Dict[str, T.Optional[str]]]
+ name_map = {} # type: T.Dict[str, str]
+
+ def extract_tgt(tgt: T.Union[ConverterTarget, ConverterCustomTarget, CustomTargetReference]) -> IdNode:
+ tgt_name = None
+ if isinstance(tgt, (ConverterTarget, ConverterCustomTarget)):
+ tgt_name = tgt.name
+ elif isinstance(tgt, CustomTargetReference):
+ tgt_name = tgt.ctgt.name
+            assert tgt_name is not None and tgt_name in processed
+ res_var = processed[tgt_name]['tgt']
+ return id_node(res_var) if res_var else None
+
+ def detect_cycle(tgt: T.Union[ConverterTarget, ConverterCustomTarget]) -> None:
+ if tgt.name in processing:
+ raise CMakeException('Cycle in CMake inputs/dependencies detected')
+ processing.append(tgt.name)
+
+ def resolve_ctgt_ref(ref: CustomTargetReference) -> T.Union[IdNode, IndexNode]:
+ tgt_var = extract_tgt(ref)
+ if len(ref.ctgt.outputs) == 1:
+ return tgt_var
+ else:
+ return indexed(tgt_var, ref.index)
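+        # (e.g. a reference to output 'a.h' of a custom target declaring
+        # outputs ['a.c', 'a.h'] resolves to `<tgt_var>[1]` in the
+        # generated Meson code)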
+
+ def process_target(tgt: ConverterTarget) -> None:
+ detect_cycle(tgt)
+
+ # First handle inter target dependencies
+ link_with = [] # type: T.List[IdNode]
+            object_libs = [] # type: T.List[IdNode]
+ sources = [] # type: T.List[Path]
+ generated = [] # type: T.List[T.Union[IdNode, IndexNode]]
+ generated_filenames = [] # type: T.List[str]
+ custom_targets = [] # type: T.List[ConverterCustomTarget]
+ dependencies = [] # type: T.List[IdNode]
+ for i in tgt.link_with:
+                assert isinstance(i, ConverterTarget)
+ if i.name not in processed:
+ process_target(i)
+ link_with += [extract_tgt(i)]
+ for i in tgt.object_libs:
+                assert isinstance(i, ConverterTarget)
+ if i.name not in processed:
+ process_target(i)
+                object_libs += [extract_tgt(i)]
+ for i in tgt.depends:
+ if not isinstance(i, ConverterCustomTarget):
+ continue
+ if i.name not in processed:
+ process_custom_target(i)
+ dependencies += [extract_tgt(i)]
+
+ # Generate the source list and handle generated sources
+ sources += tgt.sources
+ sources += tgt.generated
+
+ for ctgt_ref in tgt.generated_ctgt:
+ ctgt = ctgt_ref.ctgt
+ if ctgt.name not in processed:
+ process_custom_target(ctgt)
+ generated += [resolve_ctgt_ref(ctgt_ref)]
+ generated_filenames += [ctgt_ref.filename()]
+ if ctgt not in custom_targets:
+ custom_targets += [ctgt]
+
+ # Add all header files from all used custom targets. This
+ # ensures that all custom targets are built before any
+ # sources of the current target are compiled and thus all
+ # header files are present. This step is necessary because
+ # CMake always ensures that a custom target is executed
+ # before another target if at least one output is used.
+ for ctgt in custom_targets:
+ for j in ctgt.outputs:
+ if not is_header(j) or j in generated_filenames:
+ continue
+
+ generated += [resolve_ctgt_ref(ctgt.get_ref(Path(j)))]
+ generated_filenames += [j]
+
+ # Determine the meson function to use for the build target
+ tgt_func = tgt.meson_func()
+ if not tgt_func:
+ raise CMakeException(f'Unknown target type "{tgt.type}"')
+
+ # Determine the variable names
+ inc_var = f'{tgt.name}_inc'
+ dir_var = f'{tgt.name}_dir'
+ sys_var = f'{tgt.name}_sys'
+ src_var = f'{tgt.name}_src'
+ dep_var = f'{tgt.name}_dep'
+ tgt_var = tgt.name
+
+ install_tgt = options.get_install(tgt.cmake_name, tgt.install)
+
+ # Generate target kwargs
+ tgt_kwargs = {
+ 'build_by_default': install_tgt,
+ 'link_args': options.get_link_args(tgt.cmake_name, tgt.link_flags + tgt.link_libraries),
+ 'link_with': link_with,
+ 'include_directories': id_node(inc_var),
+ 'install': install_tgt,
+ 'override_options': options.get_override_options(tgt.cmake_name, tgt.override_options),
+ 'objects': [method(x, 'extract_all_objects') for x in object_libs],
+ } # type: TYPE_mixed_kwargs
+
+ # Only set if installed and only override if it is set
+ if install_tgt and tgt.install_dir:
+ tgt_kwargs['install_dir'] = tgt.install_dir
+
+ # Handle compiler args
+ for key, val in tgt.compile_opts.items():
+ tgt_kwargs[f'{key}_args'] = options.get_compile_args(tgt.cmake_name, key, val)
+
+ # Handle -fPIC and -fPIE
+ if tgt_func == 'executable':
+ tgt_kwargs['pie'] = tgt.pie
+ elif tgt_func == 'static_library':
+ tgt_kwargs['pic'] = tgt.pie
+
+ # declare_dependency kwargs
+ dep_kwargs = {
+ 'link_args': tgt.link_flags + tgt.link_libraries,
+ 'link_with': id_node(tgt_var),
+ 'compile_args': tgt.public_compile_opts,
+ 'include_directories': id_node(inc_var),
+ } # type: TYPE_mixed_kwargs
+
+ if dependencies:
+ generated += dependencies
+
+ # Generate the function nodes
+ dir_node = assign(dir_var, function('include_directories', tgt.includes))
+ sys_node = assign(sys_var, function('include_directories', tgt.sys_includes, {'is_system': True}))
+ inc_node = assign(inc_var, array([id_node(dir_var), id_node(sys_var)]))
+ node_list = [dir_node, sys_node, inc_node]
+ if tgt_func == 'header_only':
+ del dep_kwargs['link_with']
+ dep_node = assign(dep_var, function('declare_dependency', kwargs=dep_kwargs))
+ node_list += [dep_node]
+ src_var = None
+ tgt_var = None
+ else:
+ src_node = assign(src_var, function('files', sources))
+ tgt_node = assign(tgt_var, function(tgt_func, [tgt_var, id_node(src_var), *generated], tgt_kwargs))
+ node_list += [src_node, tgt_node]
+ if tgt_func in ['static_library', 'shared_library']:
+ dep_node = assign(dep_var, function('declare_dependency', kwargs=dep_kwargs))
+ node_list += [dep_node]
+ elif tgt_func in ['shared_module']:
+ del dep_kwargs['link_with']
+ dep_node = assign(dep_var, function('declare_dependency', kwargs=dep_kwargs))
+ node_list += [dep_node]
+ else:
+ dep_var = None
+
+ # Add the nodes to the ast
+ root_cb.lines += node_list
+ processed[tgt.name] = {'inc': inc_var, 'src': src_var, 'dep': dep_var, 'tgt': tgt_var, 'func': tgt_func}
+ name_map[tgt.cmake_name] = tgt.name
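+ # For illustration only (hedged sketch, names are hypothetical): for a CMake
+ # static library `foo`, the nodes appended above correspond roughly to this
+ # generated meson code:
+ #   foo_dir = include_directories(...)
+ #   foo_sys = include_directories(..., is_system: true)
+ #   foo_inc = [foo_dir, foo_sys]
+ #   foo_src = files(...)
+ #   foo = static_library('foo', foo_src, include_directories: foo_inc, ...)
+ #   foo_dep = declare_dependency(link_with: foo, include_directories: foo_inc)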
+
+ def process_custom_target(tgt: ConverterCustomTarget) -> None:
+ # CMake allows specifying multiple commands in a custom target.
+ # To map this to meson, a helper script is used to execute all
+ # commands in order. This additionally allows setting the working
+ # directory.
+
+ detect_cycle(tgt)
+ tgt_var = tgt.name # type: str
+
+ def resolve_source(x: T.Union[str, ConverterTarget, ConverterCustomTarget, CustomTargetReference]) -> T.Union[str, IdNode, IndexNode]:
+ if isinstance(x, ConverterTarget):
+ if x.name not in processed:
+ process_target(x)
+ return extract_tgt(x)
+ if isinstance(x, ConverterCustomTarget):
+ if x.name not in processed:
+ process_custom_target(x)
+ return extract_tgt(x)
+ elif isinstance(x, CustomTargetReference):
+ if x.ctgt.name not in processed:
+ process_custom_target(x.ctgt)
+ return resolve_ctgt_ref(x)
+ else:
+ return x
+
+ # Generate the command list
+ command = [] # type: T.List[T.Union[str, IdNode, IndexNode]]
+ command += mesonlib.get_meson_command()
+ command += ['--internal', 'cmake_run_ctgt']
+ command += ['-o', '@OUTPUT@']
+ if tgt.original_outputs:
+ command += ['-O'] + [x.as_posix() for x in tgt.original_outputs]
+ command += ['-d', tgt.working_dir.as_posix()]
+
+ # Generate the commands. Subcommands are separated by ';;;'
+ for cmd in tgt.command:
+ command += [resolve_source(x) for x in cmd] + [';;;']
+
+ tgt_kwargs = {
+ 'input': [resolve_source(x) for x in tgt.inputs],
+ 'output': tgt.outputs,
+ 'command': command,
+ 'depends': [resolve_source(x) for x in tgt.depends],
+ } # type: TYPE_mixed_kwargs
+
+ root_cb.lines += [assign(tgt_var, function('custom_target', [tgt.name], tgt_kwargs))]
+ processed[tgt.name] = {'inc': None, 'src': None, 'dep': None, 'tgt': tgt_var, 'func': 'custom_target'}
+ name_map[tgt.cmake_name] = tgt.name
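+ # Illustrative sketch (hypothetical values): a CMake custom command with two
+ # subcommands maps to a single custom_target() whose command wraps the
+ # internal cmake_run_ctgt helper, roughly:
+ #   ctgt = custom_target('ctgt', output: [...],
+ #     command: [meson_command, '--internal', 'cmake_run_ctgt', '-o', '@OUTPUT@',
+ #               '-d', work_dir, cmd1..., ';;;', cmd2..., ';;;'])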
+
+ # Now generate the target function calls
+ for ctgt in self.custom_targets:
+ if ctgt.name not in processed:
+ process_custom_target(ctgt)
+ for tgt in self.targets:
+ if tgt.name not in processed:
+ process_target(tgt)
+
+ self.generated_targets = processed
+ self.internal_name_map = name_map
+ return root_cb
+
+ def target_info(self, target: str) -> T.Optional[T.Dict[str, str]]:
+ # Try resolving the target name
+ # Start by checking whether there is an exact match (excluding the name prefix)
+ prx_tgt = _sanitize_cmake_name(target)
+ if prx_tgt in self.generated_targets:
+ return self.generated_targets[prx_tgt]
+ # check if there exists a name mapping
+ if target in self.internal_name_map:
+ target = self.internal_name_map[target]
+ assert target in self.generated_targets
+ return self.generated_targets[target]
+ return None
+
+ def target_list(self) -> T.List[str]:
+ return list(self.internal_name_map.keys())
diff --git a/meson/mesonbuild/cmake/toolchain.py b/meson/mesonbuild/cmake/toolchain.py
new file mode 100644
index 000000000..34b737c79
--- /dev/null
+++ b/meson/mesonbuild/cmake/toolchain.py
@@ -0,0 +1,259 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pathlib import Path
+from .traceparser import CMakeTraceParser
+from ..envconfig import CMakeSkipCompilerTest
+from ..mesonlib import MachineChoice
+from ..compilers import VisualStudioLikeCompiler
+from .common import language_map, cmake_get_generator_args
+from .. import mlog
+
+import shutil
+import typing as T
+from enum import Enum
+from textwrap import dedent
+
+if T.TYPE_CHECKING:
+ from .executor import CMakeExecutor
+ from ..envconfig import MachineInfo, Properties, CMakeVariables
+ from ..environment import Environment
+ from ..compilers import Compiler
+
+class CMakeExecScope(Enum):
+ SUBPROJECT = 'subproject'
+ DEPENDENCY = 'dependency'
+
+class CMakeToolchain:
+ def __init__(self, cmakebin: 'CMakeExecutor', env: 'Environment', for_machine: MachineChoice, exec_scope: CMakeExecScope, build_dir: Path, preload_file: T.Optional[Path] = None) -> None:
+ self.env = env
+ self.cmakebin = cmakebin
+ self.for_machine = for_machine
+ self.exec_scope = exec_scope
+ self.preload_file = preload_file
+ self.build_dir = build_dir
+ self.build_dir = self.build_dir.resolve()
+ self.toolchain_file = build_dir / 'CMakeMesonToolchainFile.cmake'
+ self.cmcache_file = build_dir / 'CMakeCache.txt'
+ self.minfo = self.env.machines[self.for_machine]
+ self.properties = self.env.properties[self.for_machine]
+ self.compilers = self.env.coredata.compilers[self.for_machine]
+ self.cmakevars = self.env.cmakevars[self.for_machine]
+ self.cmakestate = self.env.coredata.cmake_cache[self.for_machine]
+
+ self.variables = self.get_defaults()
+ self.variables.update(self.cmakevars.get_variables())
+
+ # Determine whether the CMake compiler test should be skipped
+ skip_status = self.properties.get_cmake_skip_compiler_test()
+ self.skip_check = skip_status == CMakeSkipCompilerTest.ALWAYS
+ if skip_status == CMakeSkipCompilerTest.DEP_ONLY and self.exec_scope == CMakeExecScope.DEPENDENCY:
+ self.skip_check = True
+ if not self.properties.get_cmake_defaults():
+ self.skip_check = False
+
+ assert self.toolchain_file.is_absolute()
+
+ def write(self) -> Path:
+ if not self.toolchain_file.parent.exists():
+ self.toolchain_file.parent.mkdir(parents=True)
+ self.toolchain_file.write_text(self.generate(), encoding='utf-8')
+ self.cmcache_file.write_text(self.generate_cache(), encoding='utf-8')
+ mlog.cmd_ci_include(self.toolchain_file.as_posix())
+ return self.toolchain_file
+
+ def get_cmake_args(self) -> T.List[str]:
+ args = ['-DCMAKE_TOOLCHAIN_FILE=' + self.toolchain_file.as_posix()]
+ if self.preload_file is not None:
+ args += ['-DMESON_PRELOAD_FILE=' + self.preload_file.as_posix()]
+ return args
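+ # Example of the resulting arguments (paths are hypothetical):
+ #   ['-DCMAKE_TOOLCHAIN_FILE=/build/__CMake_build/CMakeMesonToolchainFile.cmake',
+ #    '-DMESON_PRELOAD_FILE=/.../mesonbuild/cmake/data/preload.cmake']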
+
+ @staticmethod
+ def _print_vars(vars: T.Dict[str, T.List[str]]) -> str:
+ res = ''
+ for key, value in vars.items():
+ res += 'set(' + key
+ for i in value:
+ res += f' "{i}"'
+ res += ')\n'
+ return res
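+ # Example (hypothetical input): _print_vars({'CMAKE_SYSTEM_NAME': ['Linux']})
+ # produces the line: set(CMAKE_SYSTEM_NAME "Linux")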
+
+ def generate(self) -> str:
+ res = dedent('''\
+ ######################################
+ ### AUTOMATICALLY GENERATED FILE ###
+ ######################################
+
+ # This file was generated from the configuration in the
+ # relevant meson machine file. See the meson documentation
+ # https://mesonbuild.com/Machine-files.html for more information
+
+ if(DEFINED MESON_PRELOAD_FILE)
+ include("${MESON_PRELOAD_FILE}")
+ endif()
+
+ ''')
+
+ # Replace all \ with / in the values
+ for key, value in self.variables.items():
+ self.variables[key] = [x.replace('\\', '/') for x in value]
+
+ # Set compiler
+ if self.skip_check:
+ self.update_cmake_compiler_state()
+ res += '# CMake compiler state variables\n'
+ for lang, vars in self.cmakestate:
+ res += f'# -- Variables for language {lang}\n'
+ res += self._print_vars(vars)
+ res += '\n'
+ res += '\n'
+
+ # Set variables from the current machine config
+ res += '# Variables from meson\n'
+ res += self._print_vars(self.variables)
+ res += '\n'
+
+ # Add the user provided toolchain file
+ user_file = self.properties.get_cmake_toolchain_file()
+ if user_file is not None:
+ res += dedent('''
+ # Load the CMake toolchain file specified by the user
+ include("{}")
+
+ '''.format(user_file.as_posix()))
+
+ return res
+
+ def generate_cache(self) -> str:
+ if not self.skip_check:
+ return ''
+
+ res = ''
+ for name, v in self.cmakestate.cmake_cache.items():
+ res += f'{name}:{v.type}={";".join(v.value)}\n'
+ return res
+
+ def get_defaults(self) -> T.Dict[str, T.List[str]]:
+ defaults = {} # type: T.Dict[str, T.List[str]]
+
+ # Do nothing if the user does not want automatic defaults
+ if not self.properties.get_cmake_defaults():
+ return defaults
+
+ # Best effort to map the meson system name to CMAKE_SYSTEM_NAME, which
+ # is not trivial since CMake lacks a list of all supported
+ # CMAKE_SYSTEM_NAME values.
+ SYSTEM_MAP = {
+ 'android': 'Android',
+ 'linux': 'Linux',
+ 'windows': 'Windows',
+ 'freebsd': 'FreeBSD',
+ 'darwin': 'Darwin',
+ } # type: T.Dict[str, str]
+
+ # Only set these in a cross build. Otherwise CMake will trip up in native
+ # builds and think they are cross builds (which causes TRY_RUN() to break)
+ if self.env.is_cross_build(when_building_for=self.for_machine):
+ defaults['CMAKE_SYSTEM_NAME'] = [SYSTEM_MAP.get(self.minfo.system, self.minfo.system)]
+ defaults['CMAKE_SYSTEM_PROCESSOR'] = [self.minfo.cpu_family]
+
+ defaults['CMAKE_SIZEOF_VOID_P'] = ['8' if self.minfo.is_64_bit else '4']
+
+ sys_root = self.properties.get_sys_root()
+ if sys_root:
+ defaults['CMAKE_SYSROOT'] = [sys_root]
+
+ def make_abs(exe: str) -> str:
+ if Path(exe).is_absolute():
+ return exe
+
+ p = shutil.which(exe)
+ if p is None:
+ return exe
+ return p
+
+ # Set the compiler variables
+ for lang, comp_obj in self.compilers.items():
+ prefix = 'CMAKE_{}_'.format(language_map.get(lang, lang.upper()))
+
+ exe_list = comp_obj.get_exelist()
+ if not exe_list:
+ continue
+
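+ # If the second entry is not a command-line option, the first entry is most
+ # likely a compiler launcher wrapper (ccache, for instance), which CMake
+ # models via the CMAKE_<LANG>_COMPILER_LAUNCHER variable.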
+ if len(exe_list) >= 2 and not self.is_cmdline_option(comp_obj, exe_list[1]):
+ defaults[prefix + 'COMPILER_LAUNCHER'] = [make_abs(exe_list[0])]
+ exe_list = exe_list[1:]
+
+ exe_list[0] = make_abs(exe_list[0])
+ defaults[prefix + 'COMPILER'] = exe_list
+ if comp_obj.get_id() == 'clang-cl':
+ defaults['CMAKE_LINKER'] = comp_obj.get_linker_exelist()
+
+ return defaults
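+ # Illustrative result for a hypothetical Linux-to-Windows cross build:
+ #   {'CMAKE_SYSTEM_NAME': ['Windows'], 'CMAKE_SYSTEM_PROCESSOR': ['x86_64'],
+ #    'CMAKE_SIZEOF_VOID_P': ['8'],
+ #    'CMAKE_C_COMPILER': ['/usr/bin/x86_64-w64-mingw32-gcc']}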
+
+ @staticmethod
+ def is_cmdline_option(compiler: 'Compiler', arg: str) -> bool:
+ if isinstance(compiler, VisualStudioLikeCompiler):
+ return arg.startswith('/')
+ else:
+ return arg.startswith('-')
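+ # Example: for ['ccache', 'cl'] with an MSVC-like compiler, 'cl' does not
+ # start with '/', so 'ccache' is split off as a launcher above; for
+ # ['gcc', '-m32'] the '-m32' is recognised as an option and the list is
+ # kept intact as the compiler value.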
+
+ def update_cmake_compiler_state(self) -> None:
+ # Check if all variables are already cached
+ if self.cmakestate.languages.issuperset(self.compilers.keys()):
+ return
+
+ # Generate the CMakeLists.txt
+ mlog.debug('CMake Toolchain: Calling CMake once to generate the compiler state')
+ languages = list(self.compilers.keys())
+ lang_ids = [language_map.get(x, x.upper()) for x in languages]
+ cmake_content = dedent(f'''
+ cmake_minimum_required(VERSION 3.7)
+ project(CompInfo {' '.join(lang_ids)})
+ ''')
+
+ build_dir = Path(self.env.scratch_dir) / '__CMake_compiler_info__'
+ build_dir.mkdir(parents=True, exist_ok=True)
+ cmake_file = build_dir / 'CMakeLists.txt'
+ cmake_file.write_text(cmake_content, encoding='utf-8')
+
+ # Generate the temporary toolchain file
+ temp_toolchain_file = build_dir / 'CMakeMesonTempToolchainFile.cmake'
+ temp_toolchain_file.write_text(CMakeToolchain._print_vars(self.variables), encoding='utf-8')
+
+ # Configure
+ trace = CMakeTraceParser(self.cmakebin.version(), build_dir)
+ self.cmakebin.set_exec_mode(print_cmout=False, always_capture_stderr=trace.requires_stderr())
+ cmake_args = []
+ cmake_args += trace.trace_args()
+ cmake_args += cmake_get_generator_args(self.env)
+ cmake_args += [f'-DCMAKE_TOOLCHAIN_FILE={temp_toolchain_file.as_posix()}', '.']
+ rc, _, raw_trace = self.cmakebin.call(cmake_args, build_dir=build_dir, disable_cache=True)
+
+ if rc != 0:
+ mlog.warning('CMake Toolchain: Failed to determine CMake compilers state')
+ return
+
+ # Parse output
+ trace.parse(raw_trace)
+ self.cmakestate.cmake_cache = {**trace.cache}
+
+ vars_by_file = {k.name: v for (k, v) in trace.vars_by_file.items()}
+
+ for lang in languages:
+ lang_cmake = language_map.get(lang, lang.upper())
+ file_name = f'CMake{lang_cmake}Compiler.cmake'
+ vars = vars_by_file.setdefault(file_name, {})
+ vars[f'CMAKE_{lang_cmake}_COMPILER_FORCED'] = ['1']
+ self.cmakestate.update(lang, vars)
diff --git a/meson/mesonbuild/cmake/traceparser.py b/meson/mesonbuild/cmake/traceparser.py
new file mode 100644
index 000000000..4ddc91533
--- /dev/null
+++ b/meson/mesonbuild/cmake/traceparser.py
@@ -0,0 +1,756 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the logic for parsing the CMake trace output produced
+# with --trace-expand (both the human-readable and the json-v1 formats).
+
+from .common import CMakeException
+from .generator import parse_generator_expressions
+from .. import mlog
+from ..mesonlib import version_compare
+
+import typing as T
+from pathlib import Path
+from functools import lru_cache
+import re
+import json
+import textwrap
+
+class CMakeTraceLine:
+ def __init__(self, file_str: str, line: int, func: str, args: T.List[str]) -> None:
+ self.file = CMakeTraceLine._to_path(file_str)
+ self.line = line
+ self.func = func.lower()
+ self.args = args
+
+ @staticmethod
+ @lru_cache(maxsize=None)
+ def _to_path(file_str: str) -> Path:
+ return Path(file_str)
+
+ def __repr__(self) -> str:
+ s = 'CMake TRACE: {0}:{1} {2}({3})'
+ return s.format(self.file, self.line, self.func, self.args)
+
+class CMakeCacheEntry(T.NamedTuple):
+ value: T.List[str]
+ type: str
+
+class CMakeTarget:
+ def __init__(
+ self,
+ name: str,
+ target_type: str,
+ properties: T.Optional[T.Dict[str, T.List[str]]] = None,
+ imported: bool = False,
+ tline: T.Optional[CMakeTraceLine] = None
+ ):
+ if properties is None:
+ properties = {}
+ self.name = name
+ self.type = target_type
+ self.properties = properties
+ self.imported = imported
+ self.tline = tline
+ self.depends = [] # type: T.List[str]
+ self.current_bin_dir = None # type: T.Optional[Path]
+ self.current_src_dir = None # type: T.Optional[Path]
+
+ def __repr__(self) -> str:
+ s = 'CMake TARGET:\n -- name: {}\n -- type: {}\n -- imported: {}\n -- properties: {{\n{} }}\n -- tline: {}'
+ propSTR = ''
+ for i in self.properties:
+ propSTR += " '{}': {}\n".format(i, self.properties[i])
+ return s.format(self.name, self.type, self.imported, propSTR, self.tline)
+
+ def strip_properties(self) -> None:
+ # Strip the strings in the properties
+ if not self.properties:
+ return
+ for key, val in self.properties.items():
+ self.properties[key] = [x.strip() for x in val]
+ assert all([';' not in x for x in self.properties[key]])
+
+class CMakeGeneratorTarget(CMakeTarget):
+ def __init__(self, name: str) -> None:
+ super().__init__(name, 'CUSTOM', {})
+ self.outputs = [] # type: T.List[Path]
+ self.command = [] # type: T.List[T.List[str]]
+ self.working_dir = None # type: T.Optional[Path]
+
+class CMakeTraceParser:
+ def __init__(self, cmake_version: str, build_dir: Path, permissive: bool = True) -> None:
+ self.vars: T.Dict[str, T.List[str]] = {}
+ self.vars_by_file: T.Dict[Path, T.Dict[str, T.List[str]]] = {}
+ self.targets: T.Dict[str, CMakeTarget] = {}
+ self.cache: T.Dict[str, CMakeCacheEntry] = {}
+
+ self.explicit_headers = set() # type: T.Set[Path]
+
+ # T.List of targets that were added with add_custom_command to generate files
+ self.custom_targets = [] # type: T.List[CMakeGeneratorTarget]
+
+ self.permissive = permissive # type: bool
+ self.cmake_version = cmake_version # type: str
+ self.trace_file = 'cmake_trace.txt'
+ self.trace_file_path = build_dir / self.trace_file
+ self.trace_format = 'json-v1' if version_compare(cmake_version, '>=3.17') else 'human'
+
+ # State for delayed command execution. Delayed command execution is realised
+ # with a custom CMake file that overrides some functions and adds some
+ # introspection information to the trace.
+ self.delayed_commands = [] # type: T.List[str]
+ self.stored_commands = [] # type: T.List[CMakeTraceLine]
+
+ # All supported functions
+ self.functions = {
+ 'set': self._cmake_set,
+ 'unset': self._cmake_unset,
+ 'add_executable': self._cmake_add_executable,
+ 'add_library': self._cmake_add_library,
+ 'add_custom_command': self._cmake_add_custom_command,
+ 'add_custom_target': self._cmake_add_custom_target,
+ 'set_property': self._cmake_set_property,
+ 'set_target_properties': self._cmake_set_target_properties,
+ 'target_compile_definitions': self._cmake_target_compile_definitions,
+ 'target_compile_options': self._cmake_target_compile_options,
+ 'target_include_directories': self._cmake_target_include_directories,
+ 'target_link_libraries': self._cmake_target_link_libraries,
+ 'target_link_options': self._cmake_target_link_options,
+ 'add_dependencies': self._cmake_add_dependencies,
+
+ # Special functions defined in the preload script.
+ # These functions do nothing in the CMake code, but have special
+ # meaning here in the trace parser.
+ 'meson_ps_execute_delayed_calls': self._meson_ps_execute_delayed_calls,
+ 'meson_ps_reload_vars': self._meson_ps_reload_vars,
+ 'meson_ps_disabled_function': self._meson_ps_disabled_function,
+ } # type: T.Dict[str, T.Callable[[CMakeTraceLine], None]]
+
+ def trace_args(self) -> T.List[str]:
+ arg_map = {
+ 'human': ['--trace', '--trace-expand'],
+ 'json-v1': ['--trace-expand', '--trace-format=json-v1'],
+ }
+
+ base_args = ['--no-warn-unused-cli']
+ if not self.requires_stderr():
+ base_args += [f'--trace-redirect={self.trace_file}']
+
+ return arg_map[self.trace_format] + base_args
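+ # Example: with CMake >= 3.17 (json-v1 format, stderr not required) this
+ # returns ['--trace-expand', '--trace-format=json-v1',
+ #          '--no-warn-unused-cli', '--trace-redirect=cmake_trace.txt']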
+
+ def requires_stderr(self) -> bool:
+ return version_compare(self.cmake_version, '<3.16')
+
+ def parse(self, trace: T.Optional[str] = None) -> None:
+ # First load the trace (if required)
+ if not self.requires_stderr():
+ if not self.trace_file_path.exists():
+ raise CMakeException(f'CMake: Trace file "{self.trace_file_path!s}" not found')
+ trace = self.trace_file_path.read_text(errors='ignore', encoding='utf-8')
+ if not trace:
+ raise CMakeException('CMake: The CMake trace was not provided or is empty')
+
+ # Second parse the trace
+ lexer1 = None
+ if self.trace_format == 'human':
+ lexer1 = self._lex_trace_human(trace)
+ elif self.trace_format == 'json-v1':
+ lexer1 = self._lex_trace_json(trace)
+ else:
+ raise CMakeException(f'CMake: Internal error: Invalid trace format {self.trace_format}. Expected [human, json-v1]')
+
+ # Primary pass -- parse everything
+ for l in lexer1:
+ # store the function if its execution should be delayed
+ if l.func in self.delayed_commands:
+ self.stored_commands += [l]
+ continue
+
+ # "Execute" the CMake function if supported
+ fn = self.functions.get(l.func, None)
+ if fn:
+ fn(l)
+
+ # Postprocess
+ for tgt in self.targets.values():
+ tgt.strip_properties()
+
+ def get_first_cmake_var_of(self, var_list: T.List[str]) -> T.List[str]:
+ # Return the first found CMake variable in list var_list
+ for i in var_list:
+ if i in self.vars:
+ return self.vars[i]
+
+ return []
+
+ def get_cmake_var(self, var: str) -> T.List[str]:
+ # Return the value of the CMake variable var or an empty list if var does not exist
+ if var in self.vars:
+ return self.vars[var]
+
+ return []
+
+ def var_to_str(self, var: str) -> T.Optional[str]:
+ if var in self.vars and self.vars[var]:
+ return self.vars[var][0]
+
+ return None
+
+ def _str_to_bool(self, expr: T.Union[str, T.List[str]]) -> bool:
+ if not expr:
+ return False
+ if isinstance(expr, list):
+ expr_str = expr[0]
+ else:
+ expr_str = expr
+ expr_str = expr_str.upper()
+ return expr_str not in ['0', 'OFF', 'NO', 'FALSE', 'N', 'IGNORE'] and not expr_str.endswith('NOTFOUND')
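+ # Examples: 'ON', 'TRUE' and '1' map to True; '0', 'OFF', 'NO', 'FALSE',
+ # 'N', 'IGNORE' and anything ending in 'NOTFOUND' map to False.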
+
+ def var_to_bool(self, var: str) -> bool:
+ return self._str_to_bool(self.vars.get(var, []))
+
+ def _gen_exception(self, function: str, error: str, tline: CMakeTraceLine) -> None:
+ # Generate an exception if the parser is not in permissive mode
+
+ if self.permissive:
+ mlog.debug(f'CMake trace warning: {function}() {error}\n{tline}')
+ return None
+ raise CMakeException(f'CMake: {function}() {error}\n{tline}')
+
+ def _cmake_set(self, tline: CMakeTraceLine) -> None:
+ """Handler for the CMake set() function in all variaties.
+
+ It comes in three flavors:
+ set(<var> <value> [PARENT_SCOPE])
+ set(<var> <value> CACHE <type> <docstring> [FORCE])
+ set(ENV{<var>} <value>)
+
+ We don't support the ENV variant, and any uses of it will be ignored
+ silently. The other two variants are supported, with some caveats:
+ - we don't properly handle scoping, so calls to set() inside a
+ function without PARENT_SCOPE set could incorrectly shadow the
+ outer scope.
+ - We don't honor the type of CACHE arguments
+ """
+ # DOC: https://cmake.org/cmake/help/latest/command/set.html
+
+ cache_type = None
+ cache_force = 'FORCE' in tline.args
+ try:
+ cache_idx = tline.args.index('CACHE')
+ cache_type = tline.args[cache_idx + 1]
+ except (ValueError, IndexError):
+ pass
+
+ # 1st remove PARENT_SCOPE and CACHE from args
+ args = []
+ for i in tline.args:
+ if not i or i == 'PARENT_SCOPE':
+ continue
+
+ # Discard everything after the CACHE keyword
+ if i == 'CACHE':
+ break
+
+ args.append(i)
+
+ if len(args) < 1:
+ return self._gen_exception('set', 'requires at least one argument', tline)
+
+ # Now that we've removed extra arguments all that should be left is the
+ # variable identifier and the value, join the value back together to
+ # ensure spaces in the value are correctly handled. This assumes that
+ # variable names don't have spaces. Please don't do that...
+ identifier = args.pop(0)
+ value = ' '.join(args)
+
+ # Write to the CMake cache instead
+ if cache_type:
+ # Honor how the CMake FORCE parameter works
+ if identifier not in self.cache or cache_force:
+ self.cache[identifier] = CMakeCacheEntry(value.split(';'), cache_type)
+
+ if not value:
+ # Same as unset
+ if identifier in self.vars:
+ del self.vars[identifier]
+ else:
+ self.vars[identifier] = value.split(';')
+ self.vars_by_file.setdefault(tline.file, {})[identifier] = value.split(';')
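+ # Illustrative examples (hypothetical variable names):
+ #   set(FOO "a;b")                  -> self.vars['FOO'] == ['a', 'b']
+ #   set(BAR baz CACHE STRING "doc") -> self.cache['BAR'] == CMakeCacheEntry(['baz'], 'STRING')
+ #   set(FOO)                        -> FOO is removed from self.vars (same as unset)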
+
+ def _cmake_unset(self, tline: CMakeTraceLine) -> None:
+ # DOC: https://cmake.org/cmake/help/latest/command/unset.html
+ if len(tline.args) < 1:
+ return self._gen_exception('unset', 'requires at least one argument', tline)
+
+ if tline.args[0] in self.vars:
+ del self.vars[tline.args[0]]
+
+ def _cmake_add_executable(self, tline: CMakeTraceLine) -> None:
+ # DOC: https://cmake.org/cmake/help/latest/command/add_executable.html
+ args = list(tline.args) # Make a working copy
+
+ # Make sure the exe is imported
+ is_imported = True
+ if 'IMPORTED' not in args:
+ return self._gen_exception('add_executable', 'non imported executables are not supported', tline)
+
+ args.remove('IMPORTED')
+
+ if len(args) < 1:
+ return self._gen_exception('add_executable', 'requires at least 1 argument', tline)
+
+ self.targets[args[0]] = CMakeTarget(args[0], 'EXECUTABLE', {}, tline=tline, imported=is_imported)
+
+ def _cmake_add_library(self, tline: CMakeTraceLine) -> None:
+ # DOC: https://cmake.org/cmake/help/latest/command/add_library.html
+ args = list(tline.args) # Make a working copy
+
+ # Make sure the lib is imported
+ if 'INTERFACE' in args:
+ args.remove('INTERFACE')
+
+ if len(args) < 1:
+ return self._gen_exception('add_library', 'interface library name not specified', tline)
+
+ self.targets[args[0]] = CMakeTarget(args[0], 'INTERFACE', {}, tline=tline, imported='IMPORTED' in args)
+ elif 'IMPORTED' in args:
+ args.remove('IMPORTED')
+
+ # Now, only look at the first two arguments (target_name and target_type) and ignore the rest
+ if len(args) < 2:
+ return self._gen_exception('add_library', 'requires at least 2 arguments', tline)
+
+ self.targets[args[0]] = CMakeTarget(args[0], args[1], {}, tline=tline, imported=True)
+ elif 'ALIAS' in args:
+ args.remove('ALIAS')
+
+ # Now, only look at the first two arguments (target_name and target_ref) and ignore the rest
+ if len(args) < 2:
+ return self._gen_exception('add_library', 'requires at least 2 arguments', tline)
+
+ # Simulate the ALIAS with INTERFACE_LINK_LIBRARIES
+ self.targets[args[0]] = CMakeTarget(args[0], 'ALIAS', {'INTERFACE_LINK_LIBRARIES': [args[1]]}, tline=tline)
+ elif 'OBJECT' in args:
+ return self._gen_exception('add_library', 'OBJECT libraries are not supported', tline)
+ else:
+ self.targets[args[0]] = CMakeTarget(args[0], 'NORMAL', {}, tline=tline)
+
+ def _cmake_add_custom_command(self, tline: CMakeTraceLine, name: T.Optional[str] = None) -> None:
+ # DOC: https://cmake.org/cmake/help/latest/command/add_custom_command.html
+ args = self._flatten_args(list(tline.args)) # Commands can be passed as ';' separated lists
+
+ if not args:
+ return self._gen_exception('add_custom_command', 'requires at least 1 argument', tline)
+
+ # Skip the second function signature
+ if args[0] == 'TARGET':
+ return self._gen_exception('add_custom_command', 'TARGET syntax is currently not supported', tline)
+
+ magic_keys = ['OUTPUT', 'COMMAND', 'MAIN_DEPENDENCY', 'DEPENDS', 'BYPRODUCTS',
+ 'IMPLICIT_DEPENDS', 'WORKING_DIRECTORY', 'COMMENT', 'DEPFILE',
+ 'JOB_POOL', 'VERBATIM', 'APPEND', 'USES_TERMINAL', 'COMMAND_EXPAND_LISTS']
+
+ target = CMakeGeneratorTarget(name)
+
+ def handle_output(key: str, target: CMakeGeneratorTarget) -> None:
+ target.outputs += [Path(key)]
+
+ def handle_command(key: str, target: CMakeGeneratorTarget) -> None:
+ if key == 'ARGS':
+ return
+ target.command[-1] += [key]
+
+ def handle_depends(key: str, target: CMakeGeneratorTarget) -> None:
+ target.depends += [key]
+
+ working_dir = None
+ def handle_working_dir(key: str, target: CMakeGeneratorTarget) -> None:
+ nonlocal working_dir
+ if working_dir is None:
+ working_dir = key
+ else:
+ working_dir += ' '
+ working_dir += key
+
+ fn = None
+
+ for i in args:
+ if i in magic_keys:
+ if i == 'OUTPUT':
+ fn = handle_output
+ elif i == 'DEPENDS':
+ fn = handle_depends
+ elif i == 'WORKING_DIRECTORY':
+ fn = handle_working_dir
+ elif i == 'COMMAND':
+ fn = handle_command
+ target.command += [[]]
+ else:
+ fn = None
+ continue
+
+ if fn is not None:
+ fn(i, target)
+
+ cbinary_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_BINARY_DIR')
+ csource_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_SOURCE_DIR')
+
+ target.working_dir = Path(working_dir) if working_dir else None
+ target.current_bin_dir = Path(cbinary_dir) if cbinary_dir else None
+ target.current_src_dir = Path(csource_dir) if csource_dir else None
+ target.outputs = [Path(x) for x in self._guess_files([str(y) for y in target.outputs])]
+ target.depends = self._guess_files(target.depends)
+ target.command = [self._guess_files(x) for x in target.command]
+
+ self.custom_targets += [target]
+ if name:
+ self.targets[name] = target
+
+ def _cmake_add_custom_target(self, tline: CMakeTraceLine) -> None:
+ # DOC: https://cmake.org/cmake/help/latest/command/add_custom_target.html
+ # Only the first parameter (the target name) is of interest to us
+ if len(tline.args) < 1:
+ return self._gen_exception('add_custom_target', 'requires at least one argument', tline)
+
+ # It's pretty much the same as a custom command
+ self._cmake_add_custom_command(tline, tline.args[0])
+
+ def _cmake_set_property(self, tline: CMakeTraceLine) -> None:
+ # DOC: https://cmake.org/cmake/help/latest/command/set_property.html
+ args = list(tline.args)
+
+ scope = args.pop(0)
+
+ append = False
+ targets = []
+ while args:
+ curr = args.pop(0)
+ # XXX: APPEND_STRING is specifically *not* supposed to create a
+ # list, is treating them as aliases really okay?
+ if curr == 'APPEND' or curr == 'APPEND_STRING':
+ append = True
+ continue
+
+ if curr == 'PROPERTY':
+ break
+
+ targets += curr.split(';')
+
+ if not args:
+ return self._gen_exception('set_property', 'failed to parse argument list', tline)
+
+ if len(args) == 1:
+ # Tries to set property to nothing so nothing has to be done
+ return
+
+ identifier = args.pop(0)
+ if self.trace_format == 'human':
+ value = ' '.join(args).split(';')
+ else:
+ value = [y for x in args for y in x.split(';')]
+ if not value:
+ return
+
+ def do_target(t: str) -> None:
+ if t not in self.targets:
+ return self._gen_exception('set_property', f'TARGET {t} not found', tline)
+
+ tgt = self.targets[t]
+ if identifier not in tgt.properties:
+ tgt.properties[identifier] = []
+
+ if append:
+ tgt.properties[identifier] += value
+ else:
+ tgt.properties[identifier] = value
+
+ def do_source(src: str) -> None:
+ if identifier != 'HEADER_FILE_ONLY' or not self._str_to_bool(value):
+ return
+
+ current_src_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_SOURCE_DIR')
+ if not current_src_dir:
+ mlog.warning(textwrap.dedent('''\
+ CMake trace: set_property(SOURCE) called before the preload script was loaded.
+ Unable to determine CMAKE_CURRENT_SOURCE_DIR. This can lead to build errors.
+ '''))
+ current_src_dir = '.'
+
+ cur_p = Path(current_src_dir)
+ src_p = Path(src)
+
+ if not src_p.is_absolute():
+ src_p = cur_p / src_p
+ self.explicit_headers.add(src_p)
+
+ if scope == 'TARGET':
+ for i in targets:
+ do_target(i)
+ elif scope == 'SOURCE':
+ files = self._guess_files(targets)
+ for i in files:
+ do_source(i)
+
+ def _cmake_set_target_properties(self, tline: CMakeTraceLine) -> None:
+ # DOC: https://cmake.org/cmake/help/latest/command/set_target_properties.html
+ args = list(tline.args)
+
+ targets = []
+ while args:
+ curr = args.pop(0)
+ if curr == 'PROPERTIES':
+ break
+
+ targets.append(curr)
+
+ # Now we need to try to reconstitute the original quoted format of the
+ # arguments, as a property value could have spaces in it. Unlike
+ # set_property() this is not context free. There are two approaches I
+ # can think of, both have drawbacks:
+ #
+ # 1. Assume that the property will be capitalized ([A-Z_]), this is
+ # convention but cmake doesn't require it.
+ # 2. Maintain a copy of the list here: https://cmake.org/cmake/help/latest/manual/cmake-properties.7.html#target-properties
+ #
+ # Neither of these is awesome for obvious reasons. I'm going to try
+ # option 1 first and fall back to 2, as 1 requires less code and less
+ # synchronization for cmake changes.
+ #
+ # With the JSON output format, introduced in CMake 3.17, spaces are
+ # handled properly and we don't have to do either option
+
+ arglist = [] # type: T.List[T.Tuple[str, T.List[str]]]
+ if self.trace_format == 'human':
+ name = args.pop(0)
+ values = [] # type: T.List[str]
+ prop_regex = re.compile(r'^[A-Z_]+$')
+ for a in args:
+ if prop_regex.match(a):
+ if values:
+ arglist.append((name, ' '.join(values).split(';')))
+ name = a
+ values = []
+ else:
+ values.append(a)
+ if values:
+ arglist.append((name, ' '.join(values).split(';')))
+ else:
+ arglist = [(x[0], x[1].split(';')) for x in zip(args[::2], args[1::2])]
+
+ for name, value in arglist:
+ for i in targets:
+ if i not in self.targets:
+ return self._gen_exception('set_target_properties', f'TARGET {i} not found', tline)
+
+ self.targets[i].properties[name] = value
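+ # Illustrative reconstruction in 'human' mode (hypothetical target):
+ #   set_target_properties(mytgt PROPERTIES OUTPUT_NAME "my lib") arrives as
+ #   args ['mytgt', 'PROPERTIES', 'OUTPUT_NAME', 'my', 'lib'] and is rebuilt
+ #   into [('OUTPUT_NAME', ['my lib'])] by the capitalization heuristic above.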
+
+ def _cmake_add_dependencies(self, tline: CMakeTraceLine) -> None:
+ # DOC: https://cmake.org/cmake/help/latest/command/add_dependencies.html
+ args = list(tline.args)
+
+ if len(args) < 2:
+ return self._gen_exception('add_dependencies', 'takes at least 2 arguments', tline)
+
+ target = self.targets.get(args[0])
+ if not target:
+ return self._gen_exception('add_dependencies', 'target not found', tline)
+
+ for i in args[1:]:
+ target.depends += i.split(';')
+
+ def _cmake_target_compile_definitions(self, tline: CMakeTraceLine) -> None:
+ # DOC: https://cmake.org/cmake/help/latest/command/target_compile_definitions.html
+ self._parse_common_target_options('target_compile_definitions', 'COMPILE_DEFINITIONS', 'INTERFACE_COMPILE_DEFINITIONS', tline)
+
+ def _cmake_target_compile_options(self, tline: CMakeTraceLine) -> None:
+ # DOC: https://cmake.org/cmake/help/latest/command/target_compile_options.html
+ self._parse_common_target_options('target_compile_options', 'COMPILE_OPTIONS', 'INTERFACE_COMPILE_OPTIONS', tline)
+
+ def _cmake_target_include_directories(self, tline: CMakeTraceLine) -> None:
+ # DOC: https://cmake.org/cmake/help/latest/command/target_include_directories.html
+ self._parse_common_target_options('target_include_directories', 'INCLUDE_DIRECTORIES', 'INTERFACE_INCLUDE_DIRECTORIES', tline, ignore=['SYSTEM', 'BEFORE'], paths=True)
+
+ def _cmake_target_link_options(self, tline: CMakeTraceLine) -> None:
+ # DOC: https://cmake.org/cmake/help/latest/command/target_link_options.html
+ self._parse_common_target_options('target_link_options', 'LINK_OPTIONS', 'INTERFACE_LINK_OPTIONS', tline)
+
+ def _cmake_target_link_libraries(self, tline: CMakeTraceLine) -> None:
+ # DOC: https://cmake.org/cmake/help/latest/command/target_link_libraries.html
+ self._parse_common_target_options('target_link_libraries', 'LINK_LIBRARIES', 'INTERFACE_LINK_LIBRARIES', tline)
+
+ def _parse_common_target_options(self, func: str, private_prop: str, interface_prop: str, tline: CMakeTraceLine, ignore: T.Optional[T.List[str]] = None, paths: bool = False) -> None:
+ if ignore is None:
+ ignore = ['BEFORE']
+
+ args = list(tline.args)
+
+ if len(args) < 1:
+ return self._gen_exception(func, 'requires at least one argument', tline)
+
+ target = args[0]
+ if target not in self.targets:
+ return self._gen_exception(func, f'TARGET {target} not found', tline)
+
+ interface = []
+ private = []
+
+ mode = 'PUBLIC'
+ for i in args[1:]:
+ if i in ignore:
+ continue
+
+ if i in ['INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'PRIVATE', 'LINK_PUBLIC', 'LINK_PRIVATE']:
+ mode = i
+ continue
+
+ if mode in ['INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'LINK_PUBLIC']:
+ interface += i.split(';')
+
+ if mode in ['PUBLIC', 'PRIVATE', 'LINK_PRIVATE']:
+ private += i.split(';')
+
+ if paths:
+ interface = self._guess_files(interface)
+ private = self._guess_files(private)
+
+ interface = [x for x in interface if x]
+ private = [x for x in private if x]
+
+ for j in [(private_prop, private), (interface_prop, interface)]:
+ if j[0] not in self.targets[target].properties:
+ self.targets[target].properties[j[0]] = []
+
+ self.targets[target].properties[j[0]] += j[1]
+
+ def _meson_ps_execute_delayed_calls(self, tline: CMakeTraceLine) -> None:
+ for l in self.stored_commands:
+ fn = self.functions.get(l.func, None)
+ if fn:
+ fn(l)
+
+ # clear the stored commands
+ self.stored_commands = []
+
+ def _meson_ps_reload_vars(self, tline: CMakeTraceLine) -> None:
+ self.delayed_commands = self.get_cmake_var('MESON_PS_DELAYED_CALLS')
+
+ def _meson_ps_disabled_function(self, tline: CMakeTraceLine) -> None:
+ args = list(tline.args)
+ if not args:
+ mlog.error('Invalid preload.cmake script! At least one argument to `meson_ps_disabled_function` is expected')
+ return
+ mlog.warning(f'The CMake function "{args[0]}" was disabled to avoid compatibility issues with Meson.')
+
+ def _lex_trace_human(self, trace: str) -> T.Generator[CMakeTraceLine, None, None]:
+ # The trace format is: '<file>(<line>): <func>(<args -- can contain \n> )\n'
+ reg_tline = re.compile(r'\s*(.*\.(cmake|txt))\(([0-9]+)\):\s*(\w+)\(([\s\S]*?) ?\)\s*\n', re.MULTILINE)
+ reg_other = re.compile(r'[^\n]*\n')
+ loc = 0
+ while loc < len(trace):
+ mo_file_line = reg_tline.match(trace, loc)
+ if not mo_file_line:
+ skip_match = reg_other.match(trace, loc)
+ if not skip_match:
+ print(trace[loc:])
+ raise CMakeException('Failed to parse CMake trace')
+
+ loc = skip_match.end()
+ continue
+
+ loc = mo_file_line.end()
+
+ file = mo_file_line.group(1)
+ line = mo_file_line.group(3)
+ func = mo_file_line.group(4)
+ args = mo_file_line.group(5)
+ args = parse_generator_expressions(args)
+ argl = args.split(' ')
+ argl = [a.strip() for a in argl]
+
+ yield CMakeTraceLine(file, int(line), func, argl)
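+ # A typical 'human' trace line matched by reg_tline (hypothetical path):
+ #   /src/CMakeLists.txt(42):  add_library(foo STATIC foo.c )
+ # yields CMakeTraceLine(file='/src/CMakeLists.txt', line=42,
+ # func='add_library', args=['foo', 'STATIC', 'foo.c'])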
+
+ def _lex_trace_json(self, trace: str) -> T.Generator[CMakeTraceLine, None, None]:
+ lines = trace.splitlines(keepends=False)
+ lines.pop(0) # The first line is the version
+ for i in lines:
+ data = json.loads(i)
+ assert isinstance(data['file'], str)
+ assert isinstance(data['line'], int)
+ assert isinstance(data['cmd'], str)
+ assert isinstance(data['args'], list)
+ args = data['args']
+ for j in args:
+ assert isinstance(j, str)
+ args = [parse_generator_expressions(x) for x in args]
+ yield CMakeTraceLine(data['file'], data['line'], data['cmd'], args)
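+ # A typical json-v1 trace line (after the version header; values hypothetical):
+ #   {"file": "/src/CMakeLists.txt", "line": 42, "cmd": "add_library",
+ #    "args": ["foo", "STATIC", "foo.c"]}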
+
+ def _flatten_args(self, args: T.List[str]) -> T.List[str]:
+ # Split lists in arguments
+ res = [] # type: T.List[str]
+ for i in args:
+ res += i.split(';')
+ return res
+
+ def _guess_files(self, broken_list: T.List[str]) -> T.List[str]:
+ # Nothing has to be done for newer formats
+ if self.trace_format != 'human':
+ return broken_list
+
+ # Try joining file paths that contain spaces
+
+ reg_start = re.compile(r'^([A-Za-z]:)?/(.*/)*[^./]+$')
+ reg_end = re.compile(r'^.*\.[a-zA-Z]+$')
+
+ fixed_list = [] # type: T.List[str]
+ curr_str = None # type: T.Optional[str]
+ path_found = False # type: bool
+
+ for i in broken_list:
+ if curr_str is None:
+ curr_str = i
+ path_found = False
+ elif Path(curr_str).is_file():
+ # Abort concatenation if curr_str is an existing file
+ fixed_list += [curr_str]
+ curr_str = i
+ path_found = False
+ elif not reg_start.match(curr_str):
+ # Abort concatenation if curr_str no longer matches the regex
+ fixed_list += [curr_str]
+ curr_str = i
+ path_found = False
+ elif reg_end.match(i):
+ # File detected
+ curr_str = f'{curr_str} {i}'
+ fixed_list += [curr_str]
+ curr_str = None
+ path_found = False
+ elif Path(f'{curr_str} {i}').exists():
+ # Path detected
+ curr_str = f'{curr_str} {i}'
+ path_found = True
+ elif path_found:
+ # Add path to fixed_list after ensuring the whole path is in curr_str
+ fixed_list += [curr_str]
+ curr_str = i
+ path_found = False
+ else:
+ curr_str = f'{curr_str} {i}'
+ path_found = False
+
+ if curr_str:
+ fixed_list += [curr_str]
+ return fixed_list
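+ # Illustrative example (hypothetical paths): the space-broken list
+ #   ['/opt/my', 'files/a.txt', 'b.txt']
+ # is re-joined to ['/opt/my files/a.txt', 'b.txt'], since the first entry
+ # matches reg_start and 'files/a.txt' matches reg_end.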
diff --git a/meson/mesonbuild/compilers/__init__.py b/meson/mesonbuild/compilers/__init__.py
new file mode 100644
index 000000000..3d39c9b2f
--- /dev/null
+++ b/meson/mesonbuild/compilers/__init__.py
@@ -0,0 +1,250 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Public symbols for compilers sub-package when using 'from . import compilers'
+__all__ = [
+ 'Compiler',
+ 'RunResult',
+
+ 'all_languages',
+ 'base_options',
+ 'clib_langs',
+ 'clink_langs',
+ 'c_suffixes',
+ 'cpp_suffixes',
+ 'get_base_compile_args',
+ 'get_base_link_args',
+ 'is_assembly',
+ 'is_header',
+ 'is_library',
+ 'is_llvm_ir',
+ 'is_object',
+ 'is_source',
+ 'is_known_suffix',
+ 'lang_suffixes',
+ 'sort_clink',
+
+ 'compiler_from_language',
+ 'detect_compiler_for',
+ 'detect_static_linker',
+ 'detect_c_compiler',
+ 'detect_cpp_compiler',
+ 'detect_cuda_compiler',
+ 'detect_fortran_compiler',
+ 'detect_objc_compiler',
+ 'detect_objcpp_compiler',
+ 'detect_java_compiler',
+ 'detect_cs_compiler',
+ 'detect_vala_compiler',
+ 'detect_rust_compiler',
+ 'detect_d_compiler',
+ 'detect_swift_compiler',
+
+ 'AppleClangCCompiler',
+ 'AppleClangCPPCompiler',
+ 'AppleClangObjCCompiler',
+ 'AppleClangObjCPPCompiler',
+ 'ArmCCompiler',
+ 'ArmCPPCompiler',
+ 'ArmclangCCompiler',
+ 'ArmclangCPPCompiler',
+ 'CCompiler',
+ 'ClangCCompiler',
+ 'ClangCompiler',
+ 'ClangCPPCompiler',
+ 'ClangObjCCompiler',
+ 'ClangObjCPPCompiler',
+ 'ClangClCCompiler',
+ 'ClangClCPPCompiler',
+ 'CPPCompiler',
+ 'DCompiler',
+ 'DmdDCompiler',
+ 'FortranCompiler',
+ 'G95FortranCompiler',
+ 'GnuCCompiler',
+ 'ElbrusCCompiler',
+ 'EmscriptenCCompiler',
+ 'GnuCompiler',
+ 'GnuLikeCompiler',
+ 'GnuCPPCompiler',
+ 'ElbrusCPPCompiler',
+ 'EmscriptenCPPCompiler',
+ 'GnuDCompiler',
+ 'GnuFortranCompiler',
+ 'ElbrusFortranCompiler',
+ 'FlangFortranCompiler',
+ 'GnuObjCCompiler',
+ 'GnuObjCPPCompiler',
+ 'IntelGnuLikeCompiler',
+ 'IntelVisualStudioLikeCompiler',
+ 'IntelCCompiler',
+ 'IntelCPPCompiler',
+ 'IntelClCCompiler',
+ 'IntelClCPPCompiler',
+ 'IntelFortranCompiler',
+ 'IntelClFortranCompiler',
+ 'JavaCompiler',
+ 'LLVMDCompiler',
+ 'MonoCompiler',
+ 'CudaCompiler',
+ 'VisualStudioCsCompiler',
+ 'NAGFortranCompiler',
+ 'ObjCCompiler',
+ 'ObjCPPCompiler',
+ 'Open64FortranCompiler',
+ 'PathScaleFortranCompiler',
+ 'NvidiaHPC_CCompiler',
+ 'NvidiaHPC_CPPCompiler',
+ 'NvidiaHPC_FortranCompiler',
+ 'PGICCompiler',
+ 'PGICPPCompiler',
+ 'PGIFortranCompiler',
+ 'RustCompiler',
+ 'CcrxCCompiler',
+ 'CcrxCPPCompiler',
+ 'Xc16CCompiler',
+ 'CompCertCCompiler',
+ 'C2000CCompiler',
+ 'C2000CPPCompiler',
+ 'SunFortranCompiler',
+ 'SwiftCompiler',
+ 'ValaCompiler',
+ 'VisualStudioLikeCompiler',
+ 'VisualStudioCCompiler',
+ 'VisualStudioCPPCompiler',
+ 'CythonCompiler',
+]
+
+# Bring symbols from each module into compilers sub-package namespace
+from .compilers import (
+ Compiler,
+ RunResult,
+ all_languages,
+ base_options,
+ clib_langs,
+ clink_langs,
+ c_suffixes,
+ cpp_suffixes,
+ get_base_compile_args,
+ get_base_link_args,
+ is_header,
+ is_source,
+ is_assembly,
+ is_llvm_ir,
+ is_object,
+ is_library,
+ is_known_suffix,
+ lang_suffixes,
+ LANGUAGES_USING_LDFLAGS,
+ sort_clink,
+)
+from .detect import (
+ compiler_from_language,
+ detect_compiler_for,
+ detect_static_linker,
+ detect_c_compiler,
+ detect_cpp_compiler,
+ detect_cuda_compiler,
+ detect_objc_compiler,
+ detect_objcpp_compiler,
+ detect_fortran_compiler,
+ detect_java_compiler,
+ detect_cs_compiler,
+ detect_vala_compiler,
+ detect_rust_compiler,
+ detect_d_compiler,
+ detect_swift_compiler,
+)
+from .c import (
+ CCompiler,
+ AppleClangCCompiler,
+ ArmCCompiler,
+ ArmclangCCompiler,
+ ClangCCompiler,
+ ClangClCCompiler,
+ GnuCCompiler,
+ ElbrusCCompiler,
+ EmscriptenCCompiler,
+ IntelCCompiler,
+ IntelClCCompiler,
+ NvidiaHPC_CCompiler,
+ PGICCompiler,
+ CcrxCCompiler,
+ Xc16CCompiler,
+ CompCertCCompiler,
+ C2000CCompiler,
+ VisualStudioCCompiler,
+)
+from .cpp import (
+ CPPCompiler,
+ AppleClangCPPCompiler,
+ ArmCPPCompiler,
+ ArmclangCPPCompiler,
+ ClangCPPCompiler,
+ ClangClCPPCompiler,
+ GnuCPPCompiler,
+ ElbrusCPPCompiler,
+ EmscriptenCPPCompiler,
+ IntelCPPCompiler,
+ IntelClCPPCompiler,
+ NvidiaHPC_CPPCompiler,
+ PGICPPCompiler,
+ CcrxCPPCompiler,
+ C2000CPPCompiler,
+ VisualStudioCPPCompiler,
+)
+from .cs import MonoCompiler, VisualStudioCsCompiler
+from .d import (
+ DCompiler,
+ DmdDCompiler,
+ GnuDCompiler,
+ LLVMDCompiler,
+)
+from .cuda import CudaCompiler
+from .fortran import (
+ FortranCompiler,
+ G95FortranCompiler,
+ GnuFortranCompiler,
+ ElbrusFortranCompiler,
+ FlangFortranCompiler,
+ IntelFortranCompiler,
+ IntelClFortranCompiler,
+ NAGFortranCompiler,
+ Open64FortranCompiler,
+ PathScaleFortranCompiler,
+ NvidiaHPC_FortranCompiler,
+ PGIFortranCompiler,
+ SunFortranCompiler,
+)
+from .java import JavaCompiler
+from .objc import (
+ ObjCCompiler,
+ AppleClangObjCCompiler,
+ ClangObjCCompiler,
+ GnuObjCCompiler,
+)
+from .objcpp import (
+ ObjCPPCompiler,
+ AppleClangObjCPPCompiler,
+ ClangObjCPPCompiler,
+ GnuObjCPPCompiler,
+)
+from .rust import RustCompiler
+from .swift import SwiftCompiler
+from .vala import ValaCompiler
+from .mixins.visualstudio import VisualStudioLikeCompiler
+from .mixins.gnu import GnuCompiler, GnuLikeCompiler
+from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler
+from .mixins.clang import ClangCompiler
+from .cython import CythonCompiler
diff --git a/meson/mesonbuild/compilers/c.py b/meson/mesonbuild/compilers/c.py
new file mode 100644
index 000000000..8f6218195
--- /dev/null
+++ b/meson/mesonbuild/compilers/c.py
@@ -0,0 +1,714 @@
+# Copyright 2012-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+import typing as T
+
+from .. import coredata
+from .. import mlog
+from ..mesonlib import MachineChoice, MesonException, version_compare, OptionKey
+from .c_function_attributes import C_FUNC_ATTRIBUTES
+from .mixins.clike import CLikeCompiler
+from .mixins.ccrx import CcrxCompiler
+from .mixins.xc16 import Xc16Compiler
+from .mixins.compcert import CompCertCompiler
+from .mixins.c2000 import C2000Compiler
+from .mixins.arm import ArmCompiler, ArmclangCompiler
+from .mixins.visualstudio import MSVCCompiler, ClangClCompiler
+from .mixins.gnu import GnuCompiler
+from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler
+from .mixins.clang import ClangCompiler
+from .mixins.elbrus import ElbrusCompiler
+from .mixins.pgi import PGICompiler
+from .mixins.emscripten import EmscriptenMixin
+from .compilers import (
+ gnu_winlibs,
+ msvc_winlibs,
+ Compiler,
+)
+
+if T.TYPE_CHECKING:
+ from ..coredata import KeyedOptionDictType
+ from ..dependencies import Dependency
+ from ..envconfig import MachineInfo
+ from ..environment import Environment
+ from ..linkers import DynamicLinker
+ from ..programs import ExternalProgram
+
+ CompilerMixinBase = Compiler
+else:
+ CompilerMixinBase = object
+
+
+
+class CCompiler(CLikeCompiler, Compiler):
+
+ @staticmethod
+ def attribute_check_func(name: str) -> str:
+ try:
+ return C_FUNC_ATTRIBUTES[name]
+ except KeyError:
+ raise MesonException(f'Unknown function attribute "{name}"')
+
+ language = 'c'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ # If a child ObjC or CPP class has already set it, don't set it ourselves
+ Compiler.__init__(self, exelist, version, for_machine, info,
+ is_cross=is_cross, full_version=full_version, linker=linker)
+ CLikeCompiler.__init__(self, exe_wrapper)
+
+ def get_no_stdinc_args(self) -> T.List[str]:
+ return ['-nostdinc']
+
+ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ code = 'int main(void) { int class=0; return class; }\n'
+ return self._sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code)
+
+ def has_header_symbol(self, hname: str, symbol: str, prefix: str,
+ env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol}
+ t = '''{prefix}
+ #include <{header}>
+ int main(void) {{
+ /* If it's not defined as a macro, try to use as a symbol */
+ #ifndef {symbol}
+ {symbol};
+ #endif
+ return 0;
+ }}'''
+ return self.compiles(t.format(**fargs), env, extra_args=extra_args,
+ dependencies=dependencies)
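+ # For hname='stdio.h' and symbol='printf' (illustrative only), the snippet
+ # that gets compiled is roughly:
+ #   #include <stdio.h>
+ #   int main(void) {
+ #   #ifndef printf
+ #   printf;
+ #   #endif
+ #   return 0; }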
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = super().get_options()
+ opts.update({
+ OptionKey('std', machine=self.for_machine, lang=self.language): coredata.UserComboOption(
+ 'C language standard to use',
+ ['none'],
+ 'none',
+ )
+ })
+ return opts
+
+
+class _ClangCStds(CompilerMixinBase):
+
+ """Mixin class for clang based compilers for setting C standards.
+
+ This is used by both ClangCCompiler and ClangClCompiler, as they share
+ the same C standard versions
+ """
+
+ _C17_VERSION = '>=6.0.0'
+ _C18_VERSION = '>=8.0.0'
+ _C2X_VERSION = '>=9.0.0'
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = super().get_options()
+ c_stds = ['c89', 'c99', 'c11']
+ g_stds = ['gnu89', 'gnu99', 'gnu11']
+ # https://releases.llvm.org/6.0.0/tools/clang/docs/ReleaseNotes.html
+ # https://en.wikipedia.org/wiki/Xcode#Latest_versions
+ if version_compare(self.version, self._C17_VERSION):
+ c_stds += ['c17']
+ g_stds += ['gnu17']
+ if version_compare(self.version, self._C18_VERSION):
+ c_stds += ['c18']
+ g_stds += ['gnu18']
+ if version_compare(self.version, self._C2X_VERSION):
+ c_stds += ['c2x']
+ g_stds += ['gnu2x']
+ opts[OptionKey('std', machine=self.for_machine, lang=self.language)].choices = ['none'] + c_stds + g_stds
+ return opts
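+ # Example (illustrative): for clang 9.0 the resulting std choices are
+ # ['none', 'c89', 'c99', 'c11', 'c17', 'c18', 'c2x',
+ #  'gnu89', 'gnu99', 'gnu11', 'gnu17', 'gnu18', 'gnu2x'].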
+
+
+class ClangCCompiler(_ClangCStds, ClangCompiler, CCompiler):
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, linker=linker, full_version=full_version)
+ ClangCompiler.__init__(self, defines)
+ default_warn_args = ['-Wall', '-Winvalid-pch']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = super().get_options()
+ if self.info.is_windows() or self.info.is_cygwin():
+ opts.update({
+ OptionKey('winlibs', machine=self.for_machine, lang=self.language): coredata.UserArrayOption(
+ 'Standard Win libraries to link against',
+ gnu_winlibs,
+ ),
+ })
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ std = options[OptionKey('std', machine=self.for_machine, lang=self.language)]
+ if std.value != 'none':
+ args.append('-std=' + std.value)
+ return args
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ if self.info.is_windows() or self.info.is_cygwin():
+ # without a typeddict mypy can't understand this.
+ libs = options[OptionKey('winlibs', machine=self.for_machine, lang=self.language)].value.copy()
+ assert isinstance(libs, list)
+ for l in libs:
+ assert isinstance(l, str)
+ return libs
+ return []
+
+
+class AppleClangCCompiler(ClangCCompiler):
+
+ """Handle the differences between Apple Clang and Vanilla Clang.
+
+ Right now this just handles the differences in the versions in which new
+ C standards were added.
+ """
+
+ _C17_VERSION = '>=10.0.0'
+ _C18_VERSION = '>=11.0.0'
+ _C2X_VERSION = '>=11.0.0'
+
+
+class EmscriptenCCompiler(EmscriptenMixin, ClangCCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ full_version: T.Optional[str] = None):
+ if not is_cross:
+ raise MesonException('Emscripten compiler can only be used for cross compilation.')
+ ClangCCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper=exe_wrapper, linker=linker,
+ defines=defines, full_version=full_version)
+ self.id = 'emscripten'
+
+
+class ArmclangCCompiler(ArmclangCompiler, CCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ ArmclangCompiler.__init__(self)
+ default_warn_args = ['-Wall', '-Winvalid-pch']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CCompiler.get_options(self)
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none', 'c90', 'c99', 'c11', 'gnu90', 'gnu99', 'gnu11']
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ std = options[OptionKey('std', machine=self.for_machine, lang=self.language)]
+ if std.value != 'none':
+ args.append('-std=' + std.value)
+ return args
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return []
+
+
+class GnuCCompiler(GnuCompiler, CCompiler):
+
+ _C18_VERSION = '>=8.0.0'
+ _C2X_VERSION = '>=9.0.0'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, linker=linker, full_version=full_version)
+ GnuCompiler.__init__(self, defines)
+ default_warn_args = ['-Wall', '-Winvalid-pch']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CCompiler.get_options(self)
+ c_stds = ['c89', 'c99', 'c11']
+ g_stds = ['gnu89', 'gnu99', 'gnu11']
+ if version_compare(self.version, self._C18_VERSION):
+ c_stds += ['c17', 'c18']
+ g_stds += ['gnu17', 'gnu18']
+ if version_compare(self.version, self._C2X_VERSION):
+ c_stds += ['c2x']
+ g_stds += ['gnu2x']
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none'] + c_stds + g_stds
+ if self.info.is_windows() or self.info.is_cygwin():
+ opts.update({
+ key.evolve('winlibs'): coredata.UserArrayOption(
+ 'Standard Win libraries to link against',
+ gnu_winlibs,
+ ),
+ })
+ return opts
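+
+ # For illustration: with GCC 9.0 both version checks above pass, so the
+ # std choices also include c17/c18/gnu17/gnu18 and c2x/gnu2x.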
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ std = options[OptionKey('std', lang=self.language, machine=self.for_machine)]
+ if std.value != 'none':
+ args.append('-std=' + std.value)
+ return args
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ if self.info.is_windows() or self.info.is_cygwin():
+ # without a typeddict mypy can't figure this out
+ libs: T.List[str] = options[OptionKey('winlibs', lang=self.language, machine=self.for_machine)].value.copy()
+ assert isinstance(libs, list)
+ for l in libs:
+ assert isinstance(l, str)
+ return libs
+ return []
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ return ['-fpch-preprocess', '-include', os.path.basename(header)]
+
+
+class PGICCompiler(PGICompiler, CCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ PGICompiler.__init__(self)
+
+
+class NvidiaHPC_CCompiler(PGICompiler, CCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ PGICompiler.__init__(self)
+ self.id = 'nvidia_hpc'
+
+
+class ElbrusCCompiler(GnuCCompiler, ElbrusCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ full_version: T.Optional[str] = None):
+ GnuCCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, defines=defines,
+ linker=linker, full_version=full_version)
+ ElbrusCompiler.__init__(self)
+
+ # It supports several ISO standards and c/gnu 90, 9x, 1x dialects in addition to those which GNU CC supports.
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CCompiler.get_options(self)
+ opts[OptionKey('std', machine=self.for_machine, lang=self.language)].choices = [
+ 'none', 'c89', 'c90', 'c9x', 'c99', 'c1x', 'c11',
+ 'gnu89', 'gnu90', 'gnu9x', 'gnu99', 'gnu1x', 'gnu11',
+ 'iso9899:2011', 'iso9899:1990', 'iso9899:199409', 'iso9899:1999',
+ ]
+ return opts
+
+ # The Elbrus C compiler does not have lchmod, but this only triggers a linker warning,
+ # not a compiler error. So we should explicitly fail in this case.
+ def has_function(self, funcname: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ if funcname == 'lchmod':
+ return False, False
+ else:
+ return super().has_function(funcname, prefix, env,
+ extra_args=extra_args,
+ dependencies=dependencies)
+
+
+class IntelCCompiler(IntelGnuLikeCompiler, CCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ IntelGnuLikeCompiler.__init__(self)
+ self.lang_header = 'c-header'
+ default_warn_args = ['-Wall', '-w3', '-diag-disable:remark']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra']}
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CCompiler.get_options(self)
+ c_stds = ['c89', 'c99']
+ g_stds = ['gnu89', 'gnu99']
+ if version_compare(self.version, '>=16.0.0'):
+ c_stds += ['c11']
+ opts[OptionKey('std', machine=self.for_machine, lang=self.language)].choices = ['none'] + c_stds + g_stds
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ std = options[OptionKey('std', machine=self.for_machine, lang=self.language)]
+ if std.value != 'none':
+ args.append('-std=' + std.value)
+ return args
+
+
+class VisualStudioLikeCCompilerMixin(CompilerMixinBase):
+
+ """Shared methods that apply to MSVC-like C compilers."""
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = super().get_options()
+ opts.update({
+ OptionKey('winlibs', machine=self.for_machine, lang=self.language): coredata.UserArrayOption(
+ 'Windows libs to link against.',
+ msvc_winlibs,
+ ),
+ })
+ return opts
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ # need a TypedDict to make this work
+ key = OptionKey('winlibs', machine=self.for_machine, lang=self.language)
+ libs = options[key].value.copy()
+ assert isinstance(libs, list)
+ for l in libs:
+ assert isinstance(l, str)
+ return libs
+
+
+class VisualStudioCCompiler(MSVCCompiler, VisualStudioLikeCCompilerMixin, CCompiler):
+
+ _C11_VERSION = '>=19.28'
+ _C17_VERSION = '>=19.28'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo', target: str,
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ MSVCCompiler.__init__(self, target)
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = super().get_options()
+ c_stds = ['c89', 'c99']
+ # Need to have these to be compatible with projects
+ # that set c_std to e.g. gnu99.
+ # https://github.com/mesonbuild/meson/issues/7611
+ g_stds = ['gnu89', 'gnu90', 'gnu9x', 'gnu99']
+ if version_compare(self.version, self._C11_VERSION):
+ c_stds += ['c11']
+ g_stds += ['gnu1x', 'gnu11']
+ if version_compare(self.version, self._C17_VERSION):
+ c_stds += ['c17', 'c18']
+ g_stds += ['gnu17', 'gnu18']
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none'] + c_stds + g_stds
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ std = options[OptionKey('std', machine=self.for_machine, lang=self.language)]
+ if std.value.startswith('gnu'):
+ mlog.log_once(
+ 'cl.exe does not actually support gnu standards, and meson '
+ 'will instead demote to the nearest ISO C standard. This '
+ 'may cause compilation to fail.')
+ # As of MSVC 16.8, /std:c11 and /std:c17 are the only valid C standard options.
+ if std.value in {'c11', 'gnu1x', 'gnu11'}:
+ args.append('/std:c11')
+ elif std.value in {'c17', 'c18', 'gnu17', 'gnu18'}:
+ args.append('/std:c17')
+ return args
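+
+ # For illustration: std=c17 (or c18/gnu17/gnu18) yields ['/std:c17'],
+ # while std=gnu11 logs the warning above and yields ['/std:c11'].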
+
+
+class ClangClCCompiler(_ClangCStds, ClangClCompiler, VisualStudioLikeCCompilerMixin, CCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo', target: str,
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ ClangClCompiler.__init__(self, target)
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key].value
+ if std != "none":
+ return [f'/clang:-std={std}']
+ return []
+
+
+class IntelClCCompiler(IntelVisualStudioLikeCompiler, VisualStudioLikeCCompilerMixin, CCompiler):
+
+ """Intel "ICL" compiler abstraction."""
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo', target: str,
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ IntelVisualStudioLikeCompiler.__init__(self, target)
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = super().get_options()
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none', 'c89', 'c99', 'c11']
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value == 'c89':
+ mlog.log_once("ICL doesn't explicitly implement c89, setting the standard to 'none', which is close.")
+ elif std.value != 'none':
+ args.append('/Qstd:' + std.value)
+ return args
+
+
+class ArmCCompiler(ArmCompiler, CCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ ArmCompiler.__init__(self)
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CCompiler.get_options(self)
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none', 'c89', 'c99', 'c11']
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value != 'none':
+ args.append('--' + std.value)
+ return args
+
+
+class CcrxCCompiler(CcrxCompiler, CCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ CcrxCompiler.__init__(self)
+
+ # Override CCompiler.get_always_args
+ def get_always_args(self) -> T.List[str]:
+ return ['-nologo']
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CCompiler.get_options(self)
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none', 'c89', 'c99']
+ return opts
+
+ def get_no_stdinc_args(self) -> T.List[str]:
+ return []
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value == 'c89':
+ args.append('-lang=c')
+ elif std.value == 'c99':
+ args.append('-lang=c99')
+ return args
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return []
+
+ def get_no_optimization_args(self) -> T.List[str]:
+ return ['-optimize=0']
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return [f'-output=obj={target}']
+
+ def get_werror_args(self) -> T.List[str]:
+ return ['-change_message=error']
+
+ def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+ if path == '':
+ path = '.'
+ return ['-include=' + path]
+
+
+class Xc16CCompiler(Xc16Compiler, CCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ Xc16Compiler.__init__(self)
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CCompiler.get_options(self)
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none', 'c89', 'c99', 'gnu89', 'gnu99']
+ return opts
+
+ def get_no_stdinc_args(self) -> T.List[str]:
+ return []
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value != 'none':
+ args.append('-ansi')
+ args.append('-std=' + std.value)
+ return args
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return []
+
+ def get_no_optimization_args(self) -> T.List[str]:
+ return ['-O0']
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return [f'-o{target}']
+
+ def get_werror_args(self) -> T.List[str]:
+ return ['-change_message=error']
+
+ def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+ if path == '':
+ path = '.'
+ return ['-I' + path]
+
+
+class CompCertCCompiler(CompCertCompiler, CCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ CompCertCompiler.__init__(self)
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CCompiler.get_options(self)
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none', 'c89', 'c99']
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return []
+
+ def get_no_optimization_args(self) -> T.List[str]:
+ return ['-O0']
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return [f'-o{target}']
+
+ def get_werror_args(self) -> T.List[str]:
+ return ['-Werror']
+
+ def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+ if path == '':
+ path = '.'
+ return ['-I' + path]
+
+
+class C2000CCompiler(C2000Compiler, CCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ C2000Compiler.__init__(self)
+
+ # Override CCompiler.get_always_args
+ def get_always_args(self) -> T.List[str]:
+ return []
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CCompiler.get_options(self)
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none', 'c89', 'c99', 'c11']
+ return opts
+
+ def get_no_stdinc_args(self) -> T.List[str]:
+ return []
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value != 'none':
+ args.append('--' + std.value)
+ return args
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return []
+
+ def get_no_optimization_args(self) -> T.List[str]:
+ return ['-Ooff']
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return [f'--output_file={target}']
+
+ def get_werror_args(self) -> T.List[str]:
+ return ['-change_message=error']
+
+ def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+ if path == '':
+ path = '.'
+ return ['--include_path=' + path]
diff --git a/meson/mesonbuild/compilers/c_function_attributes.py b/meson/mesonbuild/compilers/c_function_attributes.py
new file mode 100644
index 000000000..f31229e09
--- /dev/null
+++ b/meson/mesonbuild/compilers/c_function_attributes.py
@@ -0,0 +1,132 @@
+# These functions are based on the following code:
+# https://git.savannah.gnu.org/gitweb/?p=autoconf-archive.git;a=blob_plain;f=m4/ax_gcc_func_attribute.m4,
+# which is licensed under the following terms:
+#
+# Copyright (c) 2013 Gabriele Svelto <gabriele.svelto@gmail.com>
+#
+# Copying and distribution of this file, with or without modification, are
+# permitted in any medium without royalty provided the copyright notice
+# and this notice are preserved. This file is offered as-is, without any
+# warranty.
+#
+
+C_FUNC_ATTRIBUTES = {
+ 'alias': '''
+ int foo(void) { return 0; }
+ int bar(void) __attribute__((alias("foo")));''',
+ 'aligned':
+ 'int foo(void) __attribute__((aligned(32)));',
+ 'alloc_size':
+ 'void *foo(int a) __attribute__((alloc_size(1)));',
+ 'always_inline':
+ 'inline __attribute__((always_inline)) int foo(void) { return 0; }',
+ 'artificial':
+ 'inline __attribute__((artificial)) int foo(void) { return 0; }',
+ 'cold':
+ 'int foo(void) __attribute__((cold));',
+ 'const':
+ 'int foo(void) __attribute__((const));',
+ 'constructor':
+ 'int foo(void) __attribute__((constructor));',
+ 'constructor_priority':
+ 'int foo( void ) __attribute__((__constructor__(65535/2)));',
+ 'deprecated':
+ 'int foo(void) __attribute__((deprecated("")));',
+ 'destructor':
+ 'int foo(void) __attribute__((destructor));',
+ 'dllexport':
+ '__declspec(dllexport) int foo(void) { return 0; }',
+ 'dllimport':
+ '__declspec(dllimport) int foo(void);',
+ 'error':
+ 'int foo(void) __attribute__((error("")));',
+ 'externally_visible':
+ 'int foo(void) __attribute__((externally_visible));',
+ 'fallthrough': '''
+ int foo( void ) {
+ switch (0) {
+ case 1: __attribute__((fallthrough));
+ case 2: break;
+ }
+ return 0;
+ };''',
+ 'flatten':
+ 'int foo(void) __attribute__((flatten));',
+ 'format':
+ 'int foo(const char * p, ...) __attribute__((format(printf, 1, 2)));',
+ 'format_arg':
+ 'char * foo(const char * p) __attribute__((format_arg(1)));',
+ 'force_align_arg_pointer':
+ '__attribute__((force_align_arg_pointer)) int foo(void) { return 0; }',
+ 'gnu_inline':
+ 'inline __attribute__((gnu_inline)) int foo(void) { return 0; }',
+ 'hot':
+ 'int foo(void) __attribute__((hot));',
+ 'ifunc':
+ ('int my_foo(void) { return 0; }'
+ 'static int (*resolve_foo(void))(void) { return my_foo; }'
+ 'int foo(void) __attribute__((ifunc("resolve_foo")));'),
+ 'leaf':
+ '__attribute__((leaf)) int foo(void) { return 0; }',
+ 'malloc':
+ 'int *foo(void) __attribute__((malloc));',
+ 'noclone':
+ 'int foo(void) __attribute__((noclone));',
+ 'noinline':
+ '__attribute__((noinline)) int foo(void) { return 0; }',
+ 'nonnull':
+ 'int foo(char * p) __attribute__((nonnull(1)));',
+ 'noreturn':
+ 'int foo(void) __attribute__((noreturn));',
+ 'nothrow':
+ 'int foo(void) __attribute__((nothrow));',
+ 'optimize':
+ '__attribute__((optimize(3))) int foo(void) { return 0; }',
+ 'packed':
+ 'struct __attribute__((packed)) foo { int bar; };',
+ 'pure':
+ 'int foo(void) __attribute__((pure));',
+ 'returns_nonnull':
+ 'int *foo(void) __attribute__((returns_nonnull));',
+ 'unused':
+ 'int foo(void) __attribute__((unused));',
+ 'used':
+ 'int foo(void) __attribute__((used));',
+ 'visibility': '''
+ int foo_def(void) __attribute__((visibility("default")));
+ int foo_hid(void) __attribute__((visibility("hidden")));
+ int foo_int(void) __attribute__((visibility("internal")));''',
+ 'visibility:default':
+ 'int foo(void) __attribute__((visibility("default")));',
+ 'visibility:hidden':
+ 'int foo(void) __attribute__((visibility("hidden")));',
+ 'visibility:internal':
+ 'int foo(void) __attribute__((visibility("internal")));',
+ 'visibility:protected':
+ 'int foo(void) __attribute__((visibility("protected")));',
+ 'warning':
+ 'int foo(void) __attribute__((warning("")));',
+ 'warn_unused_result':
+ 'int foo(void) __attribute__((warn_unused_result));',
+ 'weak':
+ 'int foo(void) __attribute__((weak));',
+ 'weakref': '''
+ static int foo(void) { return 0; }
+ static int var(void) __attribute__((weakref("foo")));''',
+}
+
+CXX_FUNC_ATTRIBUTES = {
+ # Alias must be applied to the mangled name in C++
+ 'alias':
+ ('extern "C" {'
+ 'int foo(void) { return 0; }'
+ '}'
+ 'int bar(void) __attribute__((alias("foo")));'
+ ),
+ 'ifunc':
+ ('extern "C" {'
+ 'int my_foo(void) { return 0; }'
+ 'static int (*resolve_foo(void))(void) { return my_foo; }'
+ '}'
+ 'int foo(void) __attribute__((ifunc("resolve_foo")));'),
+}
diff --git a/meson/mesonbuild/compilers/compilers.py b/meson/mesonbuild/compilers/compilers.py
new file mode 100644
index 000000000..0aae6e528
--- /dev/null
+++ b/meson/mesonbuild/compilers/compilers.py
@@ -0,0 +1,1294 @@
+# Copyright 2012-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import abc
+import contextlib, os.path, re
+import enum
+import itertools
+import typing as T
+from functools import lru_cache
+
+from .. import coredata
+from .. import mlog
+from .. import mesonlib
+from ..mesonlib import (
+ HoldableObject,
+ EnvironmentException, MachineChoice, MesonException,
+ Popen_safe, LibType, TemporaryDirectoryWinProof, OptionKey,
+)
+
+from ..arglist import CompilerArgs
+
+if T.TYPE_CHECKING:
+ from ..build import BuildTarget
+ from ..coredata import OptionDictType, KeyedOptionDictType
+ from ..envconfig import MachineInfo
+ from ..environment import Environment
+ from ..linkers import DynamicLinker, RSPFileSyntax
+ from ..dependencies import Dependency
+
+ CompilerType = T.TypeVar('CompilerType', bound=Compiler)
+ _T = T.TypeVar('_T')
+
+"""This file contains the data files of all compilers Meson knows
+about. To support a new compiler, add its information below.
+Also add corresponding autodetection code in environment.py."""
+
+header_suffixes = ('h', 'hh', 'hpp', 'hxx', 'H', 'ipp', 'moc', 'vapi', 'di') # type: T.Tuple[str, ...]
+obj_suffixes = ('o', 'obj', 'res') # type: T.Tuple[str, ...]
+lib_suffixes = ('a', 'lib', 'dll', 'dll.a', 'dylib', 'so') # type: T.Tuple[str, ...]
+# Mapping of language to suffixes of files that should always be in that language
+# This means we can't include .h headers here since they could be C, C++, ObjC, etc.
+lang_suffixes = {
+ 'c': ('c',),
+ 'cpp': ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx', 'ino', 'ixx', 'C'),
+ 'cuda': ('cu',),
+ # f90, f95, f03, f08 are for free-form fortran ('f90' recommended)
+ # f, for, ftn, fpp are for fixed-form fortran ('f' or 'for' recommended)
+ 'fortran': ('f90', 'f95', 'f03', 'f08', 'f', 'for', 'ftn', 'fpp'),
+ 'd': ('d', 'di'),
+ 'objc': ('m',),
+ 'objcpp': ('mm',),
+ 'rust': ('rs',),
+ 'vala': ('vala', 'vapi', 'gs'),
+ 'cs': ('cs',),
+ 'swift': ('swift',),
+ 'java': ('java',),
+ 'cython': ('pyx', ),
+} # type: T.Dict[str, T.Tuple[str, ...]]
+all_languages = lang_suffixes.keys()
+cpp_suffixes = lang_suffixes['cpp'] + ('h',) # type: T.Tuple[str, ...]
+c_suffixes = lang_suffixes['c'] + ('h',) # type: T.Tuple[str, ...]
+# List of languages that by default consume and output libraries following the
+# C ABI; these can generally be used interchangeably
+clib_langs = ('objcpp', 'cpp', 'objc', 'c', 'fortran',) # type: T.Tuple[str, ...]
+# List of assembler suffixes that can be linked with C code directly by the linker
+assembler_suffixes: T.Tuple[str, ...] = ('s', 'S')
+# List of languages that can be linked with C code directly by the linker
+# used in build.py:process_compilers() and build.py:get_dynamic_linker()
+clink_langs = ('d', 'cuda') + clib_langs # type: T.Tuple[str, ...]
+clink_suffixes = tuple() # type: T.Tuple[str, ...]
+for _l in clink_langs + ('vala',):
+ clink_suffixes += lang_suffixes[_l]
+clink_suffixes += ('h', 'll', 's')
+all_suffixes = set(itertools.chain(*lang_suffixes.values(), clink_suffixes)) # type: T.Set[str]
+
+# Languages that should use LDFLAGS arguments when linking.
+LANGUAGES_USING_LDFLAGS = {'objcpp', 'cpp', 'objc', 'c', 'fortran', 'd', 'cuda'} # type: T.Set[str]
+# Languages that should use CPPFLAGS arguments when linking.
+LANGUAGES_USING_CPPFLAGS = {'c', 'cpp', 'objc', 'objcpp'} # type: T.Set[str]
+soregex = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
+
+# Environment variables that each lang uses.
+CFLAGS_MAPPING: T.Mapping[str, str] = {
+ 'c': 'CFLAGS',
+ 'cpp': 'CXXFLAGS',
+ 'cuda': 'CUFLAGS',
+ 'objc': 'OBJCFLAGS',
+ 'objcpp': 'OBJCXXFLAGS',
+ 'fortran': 'FFLAGS',
+ 'd': 'DFLAGS',
+ 'vala': 'VALAFLAGS',
+ 'rust': 'RUSTFLAGS',
+ 'cython': 'CYTHONFLAGS',
+}
+
+CEXE_MAPPING: T.Mapping[str, str] = {
+ 'c': 'CC',
+ 'cpp': 'CXX',
+}
+
+# All these are only for C-linkable languages; see `clink_langs` above.
+
+def sort_clink(lang: str) -> int:
+ '''
+ Sorting function to sort the list of languages according to
+ reversed(compilers.clink_langs) and append the unknown langs in the end.
+ The purpose is to prefer C over C++ for files that can be compiled by
+ both such as assembly, C, etc. Also applies to ObjC, ObjC++, etc.
+ '''
+ if lang not in clink_langs:
+ return 1
+ return -clink_langs.index(lang)
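+
+# For illustration: with clink_langs == ('d', 'cuda', 'objcpp', 'cpp',
+# 'objc', 'c', 'fortran'), sorted(['cpp', 'c', 'java'], key=sort_clink)
+# gives ['c', 'cpp', 'java']: known languages come out in reversed
+# clink_langs order, and unknown languages such as 'java' sort last.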
+
+def is_header(fname: 'mesonlib.FileOrString') -> bool:
+ if isinstance(fname, mesonlib.File):
+ fname = fname.fname
+ suffix = fname.split('.')[-1]
+ return suffix in header_suffixes
+
+def is_source(fname: 'mesonlib.FileOrString') -> bool:
+ if isinstance(fname, mesonlib.File):
+ fname = fname.fname
+ suffix = fname.split('.')[-1].lower()
+ return suffix in clink_suffixes
+
+def is_assembly(fname: 'mesonlib.FileOrString') -> bool:
+ if isinstance(fname, mesonlib.File):
+ fname = fname.fname
+ return fname.split('.')[-1].lower() == 's'
+
+def is_llvm_ir(fname: 'mesonlib.FileOrString') -> bool:
+ if isinstance(fname, mesonlib.File):
+ fname = fname.fname
+ return fname.split('.')[-1] == 'll'
+
+@lru_cache(maxsize=None)
+def cached_by_name(fname: 'mesonlib.FileOrString') -> bool:
+ suffix = fname.split('.')[-1]
+ return suffix in obj_suffixes
+
+def is_object(fname: 'mesonlib.FileOrString') -> bool:
+ if isinstance(fname, mesonlib.File):
+ fname = fname.fname
+ return cached_by_name(fname)
+
+def is_library(fname: 'mesonlib.FileOrString') -> bool:
+ if isinstance(fname, mesonlib.File):
+ fname = fname.fname
+
+ if soregex.match(fname):
+ return True
+
+ suffix = fname.split('.')[-1]
+ return suffix in lib_suffixes
+
+def is_known_suffix(fname: 'mesonlib.FileOrString') -> bool:
+ if isinstance(fname, mesonlib.File):
+ fname = fname.fname
+ suffix = fname.split('.')[-1]
+
+ return suffix in all_suffixes
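+
+# For illustration, given the suffix tables above:
+#   is_header('foo.hpp')    -> True  ('hpp' is in header_suffixes)
+#   is_source('main.c')     -> True  ('c' is in clink_suffixes)
+#   is_object('main.o')     -> True  ('o' is in obj_suffixes)
+#   is_library('libz.so.1') -> True  (matches soregex)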
+
+
+class CompileCheckMode(enum.Enum):
+
+ PREPROCESS = 'preprocess'
+ COMPILE = 'compile'
+ LINK = 'link'
+
+
+cuda_buildtype_args = {'plain': [],
+ 'debug': ['-g', '-G'],
+ 'debugoptimized': ['-g', '-lineinfo'],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+ } # type: T.Dict[str, T.List[str]]
+java_buildtype_args = {'plain': [],
+ 'debug': ['-g'],
+ 'debugoptimized': ['-g'],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+ } # type: T.Dict[str, T.List[str]]
+
+rust_buildtype_args = {'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+ } # type: T.Dict[str, T.List[str]]
+
+d_gdc_buildtype_args = {'plain': [],
+ 'debug': [],
+ 'debugoptimized': ['-finline-functions'],
+ 'release': ['-finline-functions'],
+ 'minsize': [],
+ 'custom': [],
+ } # type: T.Dict[str, T.List[str]]
+
+d_ldc_buildtype_args = {'plain': [],
+ 'debug': [],
+ 'debugoptimized': ['-enable-inlining', '-Hkeep-all-bodies'],
+ 'release': ['-enable-inlining', '-Hkeep-all-bodies'],
+ 'minsize': [],
+ 'custom': [],
+ } # type: T.Dict[str, T.List[str]]
+
+d_dmd_buildtype_args = {'plain': [],
+ 'debug': [],
+ 'debugoptimized': ['-inline'],
+ 'release': ['-inline'],
+ 'minsize': [],
+ 'custom': [],
+ } # type: T.Dict[str, T.List[str]]
+
+mono_buildtype_args = {'plain': [],
+ 'debug': [],
+ 'debugoptimized': ['-optimize+'],
+ 'release': ['-optimize+'],
+ 'minsize': [],
+ 'custom': [],
+ } # type: T.Dict[str, T.List[str]]
+
+swift_buildtype_args = {'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+ } # type: T.Dict[str, T.List[str]]
+
+gnu_winlibs = ['-lkernel32', '-luser32', '-lgdi32', '-lwinspool', '-lshell32',
+ '-lole32', '-loleaut32', '-luuid', '-lcomdlg32', '-ladvapi32'] # type: T.List[str]
+
+msvc_winlibs = ['kernel32.lib', 'user32.lib', 'gdi32.lib',
+ 'winspool.lib', 'shell32.lib', 'ole32.lib', 'oleaut32.lib',
+ 'uuid.lib', 'comdlg32.lib', 'advapi32.lib'] # type: T.List[str]
+
+clike_optimization_args = {'0': [],
+ 'g': [],
+ '1': ['-O1'],
+ '2': ['-O2'],
+ '3': ['-O3'],
+ 's': ['-Os'],
+ } # type: T.Dict[str, T.List[str]]
+
+cuda_optimization_args = {'0': [],
+ 'g': ['-O0'],
+ '1': ['-O1'],
+ '2': ['-O2'],
+ '3': ['-O3'],
+ 's': ['-O3']
+ } # type: T.Dict[str, T.List[str]]
+
+cuda_debug_args = {False: [],
+ True: ['-g']} # type: T.Dict[bool, T.List[str]]
+
+clike_debug_args = {False: [],
+ True: ['-g']} # type: T.Dict[bool, T.List[str]]
+
+base_options: 'KeyedOptionDictType' = {
+ OptionKey('b_pch'): coredata.UserBooleanOption('Use precompiled headers', True),
+ OptionKey('b_lto'): coredata.UserBooleanOption('Use link time optimization', False),
+ OptionKey('b_lto_threads'): coredata.UserIntegerOption('Use multiple threads for Link Time Optimization', (None, None, 0)),
+ OptionKey('b_lto_mode'): coredata.UserComboOption('Select between different LTO modes.',
+ ['default', 'thin'],
+ 'default'),
+ OptionKey('b_sanitize'): coredata.UserComboOption('Code sanitizer to use',
+ ['none', 'address', 'thread', 'undefined', 'memory', 'address,undefined'],
+ 'none'),
+ OptionKey('b_lundef'): coredata.UserBooleanOption('Use -Wl,--no-undefined when linking', True),
+ OptionKey('b_asneeded'): coredata.UserBooleanOption('Use -Wl,--as-needed when linking', True),
+ OptionKey('b_pgo'): coredata.UserComboOption('Use profile guided optimization',
+ ['off', 'generate', 'use'],
+ 'off'),
+ OptionKey('b_coverage'): coredata.UserBooleanOption('Enable coverage tracking.', False),
+ OptionKey('b_colorout'): coredata.UserComboOption('Use colored output',
+ ['auto', 'always', 'never'],
+ 'always'),
+ OptionKey('b_ndebug'): coredata.UserComboOption('Disable asserts', ['true', 'false', 'if-release'], 'false'),
+ OptionKey('b_staticpic'): coredata.UserBooleanOption('Build static libraries as position independent', True),
+ OptionKey('b_pie'): coredata.UserBooleanOption('Build executables as position independent', False),
+ OptionKey('b_bitcode'): coredata.UserBooleanOption('Generate and embed bitcode (only macOS/iOS/tvOS)', False),
+ OptionKey('b_vscrt'): coredata.UserComboOption('VS run-time library type to use.',
+ ['none', 'md', 'mdd', 'mt', 'mtd', 'from_buildtype', 'static_from_buildtype'],
+ 'from_buildtype'),
+}
+
+def option_enabled(boptions: T.Set[OptionKey], options: 'KeyedOptionDictType',
+ option: OptionKey) -> bool:
+ try:
+ if option not in boptions:
+ return False
+ ret = options[option].value
+ assert isinstance(ret, bool), 'must return bool' # could also be str
+ return ret
+ except KeyError:
+ return False
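+
+# For illustration: option_enabled(compiler.base_options, options,
+# OptionKey('b_pch')) is True only when the compiler advertises b_pch in its
+# base_options and the stored option value is True; a missing key gives False.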
+
+
+def get_option_value(options: 'KeyedOptionDictType', opt: OptionKey, fallback: '_T') -> '_T':
+ """Get the value of an option, or the fallback value."""
+ try:
+ v: '_T' = options[opt].value
+ except KeyError:
+ return fallback
+
+ assert isinstance(v, type(fallback)), f'Should have {type(fallback)!r} but was {type(v)!r}'
+ # Mypy doesn't understand that the above assert ensures that v is type _T
+ return v
+
+
+def get_base_compile_args(options: 'KeyedOptionDictType', compiler: 'Compiler') -> T.List[str]:
+ args = [] # type: T.List[str]
+ try:
+ if options[OptionKey('b_lto')].value:
+ args.extend(compiler.get_lto_compile_args(
+ threads=get_option_value(options, OptionKey('b_lto_threads'), 0),
+ mode=get_option_value(options, OptionKey('b_lto_mode'), 'default')))
+ except KeyError:
+ pass
+ try:
+ args += compiler.get_colorout_args(options[OptionKey('b_colorout')].value)
+ except KeyError:
+ pass
+ try:
+ args += compiler.sanitizer_compile_args(options[OptionKey('b_sanitize')].value)
+ except KeyError:
+ pass
+ try:
+ pgo_val = options[OptionKey('b_pgo')].value
+ if pgo_val == 'generate':
+ args.extend(compiler.get_profile_generate_args())
+ elif pgo_val == 'use':
+ args.extend(compiler.get_profile_use_args())
+ except KeyError:
+ pass
+ try:
+ if options[OptionKey('b_coverage')].value:
+ args += compiler.get_coverage_args()
+ except KeyError:
+ pass
+ try:
+ if (options[OptionKey('b_ndebug')].value == 'true' or
+ (options[OptionKey('b_ndebug')].value == 'if-release' and
+ options[OptionKey('buildtype')].value in {'release', 'plain'})):
+ args += compiler.get_disable_assert_args()
+ except KeyError:
+ pass
+ # This does not need a try...except
+ if option_enabled(compiler.base_options, options, OptionKey('b_bitcode')):
+ args.append('-fembed-bitcode')
+ try:
+ crt_val = options[OptionKey('b_vscrt')].value
+ buildtype = options[OptionKey('buildtype')].value
+ try:
+ args += compiler.get_crt_compile_args(crt_val, buildtype)
+ except AttributeError:
+ pass
+ except KeyError:
+ pass
+ return args
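+
+# For illustration (GCC-like compiler, hypothetical option values): with
+# b_lto=true and b_colorout=always this would collect
+# ['-flto', '-fdiagnostics-color=always'] via the compiler's LTO and
+# colorout hooks.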
+
+def get_base_link_args(options: 'KeyedOptionDictType', linker: 'Compiler',
+ is_shared_module: bool) -> T.List[str]:
+ args = [] # type: T.List[str]
+ try:
+ if options[OptionKey('b_lto')].value:
+ args.extend(linker.get_lto_link_args(
+ threads=get_option_value(options, OptionKey('b_lto_threads'), 0),
+ mode=get_option_value(options, OptionKey('b_lto_mode'), 'default')))
+ except KeyError:
+ pass
+ try:
+ args += linker.sanitizer_link_args(options[OptionKey('b_sanitize')].value)
+ except KeyError:
+ pass
+ try:
+ pgo_val = options[OptionKey('b_pgo')].value
+ if pgo_val == 'generate':
+ args.extend(linker.get_profile_generate_args())
+ elif pgo_val == 'use':
+ args.extend(linker.get_profile_use_args())
+ except KeyError:
+ pass
+ try:
+ if options[OptionKey('b_coverage')].value:
+ args += linker.get_coverage_link_args()
+ except KeyError:
+ pass
+
+ as_needed = option_enabled(linker.base_options, options, OptionKey('b_asneeded'))
+ bitcode = option_enabled(linker.base_options, options, OptionKey('b_bitcode'))
+ # Shared modules cannot be built with bitcode_bundle because
+ # -bitcode_bundle is incompatible with -undefined and -bundle
+ if bitcode and not is_shared_module:
+ args.extend(linker.bitcode_args())
+ elif as_needed:
+ # -Wl,-dead_strip_dylibs is incompatible with bitcode
+ args.extend(linker.get_asneeded_args())
+
+ # Apple's ld (the only one that supports bitcode) does not like -undefined
+ # arguments or -headerpad_max_install_names when bitcode is enabled
+ if not bitcode:
+ args.extend(linker.headerpad_args())
+ if (not is_shared_module and
+ option_enabled(linker.base_options, options, OptionKey('b_lundef'))):
+ args.extend(linker.no_undefined_link_args())
+ else:
+ args.extend(linker.get_allow_undefined_link_args())
+
+ try:
+ crt_val = options[OptionKey('b_vscrt')].value
+ buildtype = options[OptionKey('buildtype')].value
+ try:
+ args += linker.get_crt_link_args(crt_val, buildtype)
+ except AttributeError:
+ pass
+ except KeyError:
+ pass
+ return args
+
+
+class CrossNoRunException(MesonException):
+ pass
+
+class RunResult(HoldableObject):
+ def __init__(self, compiled: bool, returncode: int = 999,
+ stdout: str = 'UNDEFINED', stderr: str = 'UNDEFINED'):
+ self.compiled = compiled
+ self.returncode = returncode
+ self.stdout = stdout
+ self.stderr = stderr
+
+
+class CompileResult(HoldableObject):
+
+ """The result of Compiler.compiles (and friends)."""
+
+ def __init__(self, stdo: T.Optional[str] = None, stde: T.Optional[str] = None,
+ args: T.Optional[T.List[str]] = None,
+ returncode: int = 999, pid: int = -1,
+ text_mode: bool = True,
+ input_name: T.Optional[str] = None,
+ output_name: T.Optional[str] = None,
+ command: T.Optional[T.List[str]] = None, cached: bool = False):
+ self.stdout = stdo
+ self.stderr = stde
+ self.input_name = input_name
+ self.output_name = output_name
+ self.command = command or []
+ self.args = args or []
+ self.cached = cached
+ self.returncode = returncode
+ self.pid = pid
+ self.text_mode = text_mode
+
+
+class Compiler(HoldableObject, metaclass=abc.ABCMeta):
+ # Libraries to ignore in find_library() since they are provided by the
+ # compiler or the C library. Currently only used for MSVC.
+ ignore_libs = [] # type: T.List[str]
+ # Libraries that are internal compiler implementations, and must not be
+ # manually searched.
+ internal_libs = [] # type: T.List[str]
+
+ LINKER_PREFIX = None # type: T.Union[None, str, T.List[str]]
+ INVOKES_LINKER = True
+
+ # TODO: these could be forward declarations once we drop 3.5 support
+ if T.TYPE_CHECKING:
+ language = 'unset'
+ id = ''
+ warn_args = {} # type: T.Dict[str, T.List[str]]
+
+ def __init__(self, exelist: T.List[str], version: str,
+ for_machine: MachineChoice, info: 'MachineInfo',
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None, is_cross: bool = False):
+ self.exelist = exelist
+ # In case it's been overridden by a child class already
+ if not hasattr(self, 'file_suffixes'):
+ self.file_suffixes = lang_suffixes[self.language]
+ if not hasattr(self, 'can_compile_suffixes'):
+ self.can_compile_suffixes = set(self.file_suffixes)
+ self.default_suffix = self.file_suffixes[0]
+ self.version = version
+ self.full_version = full_version
+ self.for_machine = for_machine
+ self.base_options: T.Set[OptionKey] = set()
+ self.linker = linker
+ self.info = info
+ self.is_cross = is_cross
+
+ def __repr__(self) -> str:
+ repr_str = "<{0}: v{1} `{2}`>"
+ return repr_str.format(self.__class__.__name__, self.version,
+ ' '.join(self.exelist))
+
+ @lru_cache(maxsize=None)
+ def can_compile(self, src: 'mesonlib.FileOrString') -> bool:
+ if isinstance(src, mesonlib.File):
+ src = src.fname
+ suffix = os.path.splitext(src)[1]
+ if suffix != '.C':
+ suffix = suffix.lower()
+ return bool(suffix) and suffix[1:] in self.can_compile_suffixes
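+
+ # For illustration: a C compiler (file_suffixes ('c',)) can compile
+ # 'foo.c' but not 'foo.cpp'; the '.C' suffix is kept case-sensitive
+ # above because it conventionally denotes C++ sources.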
+
+ def get_id(self) -> str:
+ return self.id
+
+ def get_linker_id(self) -> str:
+ # There is no guarantee that we have a dynamic linker instance, as
+ # some languages don't have separate linkers and compilers. In those
+ # cases return the compiler id.
+ try:
+ return self.linker.id
+ except AttributeError:
+ return self.id
+
+ def get_version_string(self) -> str:
+ details = [self.id, self.version]
+ if self.full_version:
+ details += ['"%s"' % (self.full_version)]
+ return '(%s)' % (' '.join(details))
+
+ def get_language(self) -> str:
+ return self.language
+
+ @classmethod
+ def get_display_language(cls) -> str:
+ return cls.language.capitalize()
+
+ def get_default_suffix(self) -> str:
+ return self.default_suffix
+
+ def get_define(self, dname: str, prefix: str, env: 'Environment',
+ extra_args: T.List[str], dependencies: T.List['Dependency'],
+ disable_cache: bool = False) -> T.Tuple[str, bool]:
+ raise EnvironmentException('%s does not support get_define ' % self.get_id())
+
+ def compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int],
+ guess: T.Optional[int], prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]], dependencies: T.Optional[T.List['Dependency']]) -> int:
+ raise EnvironmentException('%s does not support compute_int ' % self.get_id())
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+ build_dir: str) -> T.List[str]:
+ raise EnvironmentException('%s does not support compute_parameters_with_absolute_paths ' % self.get_id())
+
+ def has_members(self, typename: str, membernames: T.List[str],
+ prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ raise EnvironmentException('%s does not support has_member(s) ' % self.get_id())
+
+ def has_type(self, typename: str, prefix: str, env: 'Environment',
+ extra_args: T.List[str], *,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ raise EnvironmentException('%s does not support has_type ' % self.get_id())
+
+ def symbols_have_underscore_prefix(self, env: 'Environment') -> bool:
+ raise EnvironmentException('%s does not support symbols_have_underscore_prefix ' % self.get_id())
+
+ def get_exelist(self) -> T.List[str]:
+ return self.exelist.copy()
+
+ def get_linker_exelist(self) -> T.List[str]:
+ return self.linker.get_exelist()
+
+ @abc.abstractmethod
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ pass
+
+ def get_linker_output_args(self, outputname: str) -> T.List[str]:
+ return self.linker.get_output_args(outputname)
+
+ def get_linker_search_args(self, dirname: str) -> T.List[str]:
+ return self.linker.get_search_args(dirname)
+
+ def get_builtin_define(self, define: str) -> T.Optional[str]:
+ raise EnvironmentException('%s does not support get_builtin_define.' % self.id)
+
+ def has_builtin_define(self, define: str) -> bool:
+ raise EnvironmentException('%s does not support has_builtin_define.' % self.id)
+
+ def get_always_args(self) -> T.List[str]:
+ return []
+
+ def can_linker_accept_rsp(self) -> bool:
+ """
+ Determines whether the linker can accept arguments using the @rsp syntax.
+ """
+ return self.linker.get_accepts_rsp()
+
+ def get_linker_always_args(self) -> T.List[str]:
+ return self.linker.get_always_args()
+
+ def get_linker_lib_prefix(self) -> str:
+ return self.linker.get_lib_prefix()
+
+ def gen_import_library_args(self, implibname: str) -> T.List[str]:
+ """
+ Used only on Windows for libraries that need an import library.
+ This currently means C, C++, Fortran.
+ """
+ return []
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ return {}
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return []
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return self.linker.get_option_args(options)
+
+ def check_header(self, hname: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ """Check that header is usable.
+
+ Returns a two item tuple of bools. The first bool is whether the
+ check succeeded, the second is whether the result was cached (True)
+ or run fresh (False).
+ """
+ raise EnvironmentException('Language %s does not support header checks.' % self.get_display_language())
+
+ def has_header(self, hname: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None,
+ disable_cache: bool = False) -> T.Tuple[bool, bool]:
+ """Check that header is exists.
+
+ This check will return true if the file exists, even if it contains:
+
+ ```c
+ # error "You thought you could use this, LOLZ!"
+ ```
+
+ Use check_header if your header only works in some cases.
+
+ Returns a two item tuple of bools. The first bool is whether the
+ check succeeded, the second is whether the result was cached (True)
+ or run fresh (False).
+ """
+ raise EnvironmentException('Language %s does not support header checks.' % self.get_display_language())
+
+ def has_header_symbol(self, hname: str, symbol: str, prefix: str,
+ env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ raise EnvironmentException('Language %s does not support header symbol checks.' % self.get_display_language())
+
+ def run(self, code: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> RunResult:
+ raise EnvironmentException('Language %s does not support run checks.' % self.get_display_language())
+
+ def sizeof(self, typename: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+ raise EnvironmentException('Language %s does not support sizeof checks.' % self.get_display_language())
+
+ def alignment(self, typename: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+ raise EnvironmentException('Language %s does not support alignment checks.' % self.get_display_language())
+
+ def has_function(self, funcname: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ """See if a function exists.
+
+ Returns a two item tuple of bools. The first bool is whether the
+ check succeeded, the second is whether the result was cached (True)
+ or run fresh (False).
+ """
+ raise EnvironmentException('Language %s does not support function checks.' % self.get_display_language())
+
+ def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:
+ "Always returns a copy that can be independently mutated"
+ return args.copy()
+
+ @classmethod
+ def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]:
+ "Always returns a copy that can be independently mutated"
+ return args.copy()
+
+ def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
+ libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]:
+ raise EnvironmentException(f'Language {self.get_display_language()} does not support library finding.')
+
+ def get_library_naming(self, env: 'Environment', libtype: LibType,
+ strict: bool = False) -> T.Optional[T.Tuple[str, ...]]:
+ raise EnvironmentException(
+ 'Language {} does not support get_library_naming.'.format(
+ self.get_display_language()))
+
+ def get_program_dirs(self, env: 'Environment') -> T.List[str]:
+ return []
+
+ def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+ raise EnvironmentException(
+ 'Language {} does not support has_multi_arguments.'.format(
+ self.get_display_language()))
+
+ def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+ return self.linker.has_multi_arguments(args, env)
+
+ def _get_compile_output(self, dirname: str, mode: str) -> str:
+ # TODO: mode should really be an enum
+ # In pre-processor mode, the output is sent to stdout and discarded
+ if mode == 'preprocess':
+ return None
+ # The extension only matters if we are going to run the result; '.exe' is
+ # guaranteed to be executable on every platform.
+ if mode == 'link':
+ suffix = 'exe'
+ else:
+ suffix = 'obj'
+ return os.path.join(dirname, 'output.' + suffix)
+
+ def get_compiler_args_for_mode(self, mode: CompileCheckMode) -> T.List[str]:
+ # TODO: mode should really be an enum
+ args = [] # type: T.List[str]
+ args += self.get_always_args()
+ if mode is CompileCheckMode.COMPILE:
+ args += self.get_compile_only_args()
+ elif mode is CompileCheckMode.PREPROCESS:
+ args += self.get_preprocess_only_args()
+ else:
+ assert mode is CompileCheckMode.LINK
+ return args
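+
+ # For illustration, on a GCC-like compiler: COMPILE mode appends '-c',
+ # PREPROCESS appends '-E', and LINK adds nothing beyond get_always_args().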
+
+ def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs:
+ """Return an appropriate CompilerArgs instance for this class."""
+ return CompilerArgs(self, args)
+
+ @contextlib.contextmanager
+ def compile(self, code: 'mesonlib.FileOrString',
+ extra_args: T.Union[None, CompilerArgs, T.List[str]] = None,
+ *, mode: str = 'link', want_output: bool = False,
+ temp_dir: T.Optional[str] = None) -> T.Iterator[T.Optional[CompileResult]]:
+ # TODO: there isn't really any reason for this to be a contextmanager
+ if extra_args is None:
+ extra_args = []
+
+ with TemporaryDirectoryWinProof(dir=temp_dir) as tmpdirname:
+ no_ccache = False
+ if isinstance(code, str):
+ srcname = os.path.join(tmpdirname,
+ 'testfile.' + self.default_suffix)
+ with open(srcname, 'w', encoding='utf-8') as ofile:
+ ofile.write(code)
+ # ccache would result in a cache miss
+ no_ccache = True
+ contents = code
+ elif isinstance(code, mesonlib.File):
+ srcname = code.fname
+ with open(code.fname, encoding='utf-8') as f:
+ contents = f.read()
+
+ # Construct the compiler command-line
+ commands = self.compiler_args()
+ commands.append(srcname)
+ # Preprocess mode outputs to stdout, so no output args
+ if mode != 'preprocess':
+ output = self._get_compile_output(tmpdirname, mode)
+ commands += self.get_output_args(output)
+ commands.extend(self.get_compiler_args_for_mode(CompileCheckMode(mode)))
+ # extra_args must be last because it could contain '/link' to
+ # pass args to VisualStudio's linker. In that case everything
+ # in the command line after '/link' is given to the linker.
+ commands += extra_args
+ # Generate full command-line with the exelist
+ command_list = self.get_exelist() + commands.to_native()
+ mlog.debug('Running compile:')
+ mlog.debug('Working directory: ', tmpdirname)
+ mlog.debug('Command line: ', ' '.join(command_list), '\n')
+ mlog.debug('Code:\n', contents)
+ os_env = os.environ.copy()
+ os_env['LC_ALL'] = 'C'
+ if no_ccache:
+ os_env['CCACHE_DISABLE'] = '1'
+ p, stdo, stde = Popen_safe(command_list, cwd=tmpdirname, env=os_env)
+ mlog.debug('Compiler stdout:\n', stdo)
+ mlog.debug('Compiler stderr:\n', stde)
+
+ result = CompileResult(stdo, stde, list(commands), p.returncode, p.pid, input_name=srcname)
+ if want_output:
+ result.output_name = output
+ yield result
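+
+ # Typical usage (illustrative; 'compiler' is any concrete Compiler):
+ #   with compiler.compile('int main(void) { return 0; }') as res:
+ #       assert res.returncode == 0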
+
+ @contextlib.contextmanager
+ def cached_compile(self, code: str, cdata: coredata.CoreData, *,
+ extra_args: T.Union[None, T.List[str], CompilerArgs] = None,
+ mode: str = 'link',
+ temp_dir: T.Optional[str] = None) -> T.Iterator[T.Optional[CompileResult]]:
+ # TODO: There isn't really any reason for this to be a context manager
+
+ # Calculate the key
+ textra_args = tuple(extra_args) if extra_args is not None else tuple() # type: T.Tuple[str, ...]
+ key = (tuple(self.exelist), self.version, code, textra_args, mode) # type: coredata.CompilerCheckCacheKey
+
+ # Check if not cached, and generate, otherwise get from the cache
+ if key in cdata.compiler_check_cache:
+ p = cdata.compiler_check_cache[key] # type: CompileResult
+ p.cached = True
+ mlog.debug('Using cached compile:')
+ mlog.debug('Cached command line: ', ' '.join(p.command), '\n')
+ mlog.debug('Code:\n', code)
+ mlog.debug('Cached compiler stdout:\n', p.stdout)
+ mlog.debug('Cached compiler stderr:\n', p.stderr)
+ yield p
+ else:
+ with self.compile(code, extra_args=extra_args, mode=mode, want_output=False, temp_dir=temp_dir) as p:
+ cdata.compiler_check_cache[key] = p
+ yield p
+
+ def get_colorout_args(self, colortype: str) -> T.List[str]:
+ # TODO: colortype can probably be an enum
+ return []
+
+ # Some compilers (msvc) write debug info to a separate file.
+ # These args specify where it should be written.
+ def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]:
+ return []
+
+ def get_link_debugfile_name(self, targetfile: str) -> str:
+ return self.linker.get_debugfile_name(targetfile)
+
+ def get_link_debugfile_args(self, targetfile: str) -> T.List[str]:
+ return self.linker.get_debugfile_args(targetfile)
+
+ def get_std_shared_lib_link_args(self) -> T.List[str]:
+ return self.linker.get_std_shared_lib_args()
+
+ def get_std_shared_module_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return self.linker.get_std_shared_module_args(options)
+
+ def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+ return self.linker.get_link_whole_for(args)
+
+ def get_allow_undefined_link_args(self) -> T.List[str]:
+ return self.linker.get_allow_undefined_args()
+
+ def no_undefined_link_args(self) -> T.List[str]:
+ return self.linker.no_undefined_args()
+
+ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]:
+ """Compiler arguments needed to enable the given instruction set.
+
+ The return value may be an empty list, meaning nothing is needed, or
+ None, meaning the given set is not supported.
+ """
+ return None
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return self.linker.build_rpath_args(
+ env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ return []
+
+ def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+ return self.linker.thread_flags(env)
+
+ def openmp_flags(self) -> T.List[str]:
+ raise EnvironmentException('Language %s does not support OpenMP flags.' % self.get_display_language())
+
+ def openmp_link_flags(self) -> T.List[str]:
+ return self.openmp_flags()
+
+ def language_stdlib_only_link_flags(self) -> T.List[str]:
+ return []
+
+ def gnu_symbol_visibility_args(self, vistype: str) -> T.List[str]:
+ return []
+
+ def get_gui_app_args(self, value: bool) -> T.List[str]:
+ # Only used on Windows
+ return self.linker.get_gui_app_args(value)
+
+ def get_win_subsystem_args(self, value: str) -> T.List[str]:
+ # By default the dynamic linker is going to return an empty
+ # array in case it either doesn't support Windows subsystems
+ # or does not target Windows
+ return self.linker.get_win_subsystem_args(value)
+
+ def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]:
+ raise EnvironmentException(
+ f'Language {self.get_display_language()} does not support function attributes.')
+
+ def get_pic_args(self) -> T.List[str]:
+ m = 'Language {} does not support position-independent code'
+ raise EnvironmentException(m.format(self.get_display_language()))
+
+ def get_pie_args(self) -> T.List[str]:
+ m = 'Language {} does not support position-independent executable'
+ raise EnvironmentException(m.format(self.get_display_language()))
+
+ def get_pie_link_args(self) -> T.List[str]:
+ return self.linker.get_pie_args()
+
+ def get_argument_syntax(self) -> str:
+ """Returns the argument family type.
+
+ Compilers fall into families if they try to emulate the command line
+ interface of another compiler. For example, clang is in the GCC family
+ since it accepts most of the same arguments as GCC. ICL (ICC on
+ windows) is in the MSVC family since it accepts most of the same
+ arguments as MSVC.
+ """
+ return 'other'
+
+ def get_profile_generate_args(self) -> T.List[str]:
+ raise EnvironmentException(
+ '%s does not support get_profile_generate_args ' % self.get_id())
+
+ def get_profile_use_args(self) -> T.List[str]:
+ raise EnvironmentException(
+ '%s does not support get_profile_use_args ' % self.get_id())
+
+ def remove_linkerlike_args(self, args: T.List[str]) -> T.List[str]:
+ rm_exact = ('-headerpad_max_install_names',)
+ rm_prefixes = ('-Wl,', '-L',)
+ rm_next = ('-L', '-framework',)
+ ret = [] # type: T.List[str]
+ iargs = iter(args)
+ for arg in iargs:
+ # Remove this argument
+ if arg in rm_exact:
+ continue
+ # If the argument starts with this, but is not *exactly* this
+ # f.ex., '-L' should match ['-Lfoo'] but not ['-L', 'foo']
+ if arg.startswith(rm_prefixes) and arg not in rm_prefixes:
+ continue
+ # Ignore this argument and the one after it
+ if arg in rm_next:
+ next(iargs)
+ continue
+ ret.append(arg)
+ return ret
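+
+ # Worked example (hypothetical argument list):
+ #   remove_linkerlike_args(['-Lfoo', '-L', 'foo', '-Wl,-x', '-framework',
+ #                           'Foo', '-headerpad_max_install_names', '-O2'])
+ #   -> ['-O2']
+ # '-Lfoo' and '-Wl,-x' match a prefix, '-L' and '-framework' also consume
+ # the following argument, and the exact match is dropped.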
+
+ def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
+ return []
+
+ def get_lto_link_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
+ return self.linker.get_lto_args()
+
+ def sanitizer_compile_args(self, value: str) -> T.List[str]:
+ return []
+
+ def sanitizer_link_args(self, value: str) -> T.List[str]:
+ return self.linker.sanitizer_args(value)
+
+ def get_asneeded_args(self) -> T.List[str]:
+ return self.linker.get_asneeded_args()
+
+ def headerpad_args(self) -> T.List[str]:
+ return self.linker.headerpad_args()
+
+ def bitcode_args(self) -> T.List[str]:
+ return self.linker.bitcode_args()
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ raise EnvironmentException(f'{self.id} does not implement get_buildtype_args')
+
+ def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
+ return self.linker.get_buildtype_args(buildtype)
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str,
+ darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ return self.linker.get_soname_args(
+ env, prefix, shlib_name, suffix, soversion,
+ darwin_versions, is_shared_module)
+
+ def get_target_link_args(self, target: 'BuildTarget') -> T.List[str]:
+ return target.link_args
+
+ def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]:
+ return dep.get_compile_args()
+
+ def get_dependency_link_args(self, dep: 'Dependency') -> T.List[str]:
+ return dep.get_link_args()
+
+ @classmethod
+ def use_linker_args(cls, linker: str) -> T.List[str]:
+ """Get a list of arguments to pass to the compiler to set the linker.
+ """
+ return []
+
+ def get_coverage_args(self) -> T.List[str]:
+ return []
+
+ def get_coverage_link_args(self) -> T.List[str]:
+ return self.linker.get_coverage_args()
+
+ def get_disable_assert_args(self) -> T.List[str]:
+ return []
+
+ def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ raise EnvironmentException('This compiler does not support Windows CRT selection')
+
+ def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ raise EnvironmentException('This compiler does not support Windows CRT selection')
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return []
+
+ def get_preprocess_only_args(self) -> T.List[str]:
+ raise EnvironmentException('This compiler does not have a preprocessor')
+
+ def get_default_include_dirs(self) -> T.List[str]:
+ # TODO: This is a candidate for returning an immutable list
+ return []
+
+ def get_largefile_args(self) -> T.List[str]:
+ '''Enable transparent large-file-support for 32-bit UNIX systems'''
+ if not (self.get_argument_syntax() == 'msvc' or self.info.is_darwin()):
+ # Enable large-file support unconditionally on all platforms other
+ # than macOS and MSVC. macOS is now 64-bit-only so it doesn't
+ # need anything special, and MSVC doesn't have automatic LFS.
+ # You must use the 64-bit counterparts explicitly.
+ # glibc, musl, and uclibc, and all BSD libcs support this. On Android,
+ # support for transparent LFS is available depending on the version of
+ # Bionic: https://github.com/android/platform_bionic#32-bit-abi-bugs
+ # https://code.google.com/p/android/issues/detail?id=64613
+ #
+ # If this breaks your code, fix it! It's been 20+ years!
+ return ['-D_FILE_OFFSET_BITS=64']
+ # We don't enable -D_LARGEFILE64_SOURCE since that enables
+ # transitionary features and must be enabled by programs that use
+ # those features explicitly.
+ return []
+
+ def get_library_dirs(self, env: 'Environment',
+ elf_class: T.Optional[int] = None) -> T.List[str]:
+ return []
+
+ def get_return_value(self,
+ fname: str,
+ rtype: str,
+ prefix: str,
+ env: 'Environment',
+ extra_args: T.Optional[T.List[str]],
+ dependencies: T.Optional[T.List['Dependency']]) -> T.Union[str, int]:
+ raise EnvironmentException(f'{self.id} does not support get_return_value')
+
+ def find_framework(self,
+ name: str,
+ env: 'Environment',
+ extra_dirs: T.List[str],
+ allow_system: bool = True) -> T.Optional[T.List[str]]:
+ raise EnvironmentException(f'{self.id} does not support find_framework')
+
+ def find_framework_paths(self, env: 'Environment') -> T.List[str]:
+ raise EnvironmentException(f'{self.id} does not support find_framework_paths')
+
+ def attribute_check_func(self, name: str) -> str:
+ raise EnvironmentException(f'{self.id} does not support attribute checks')
+
+    def get_pch_suffix(self) -> str:
+        raise EnvironmentException(f'{self.id} does not support precompiled headers')
+
+    def get_pch_name(self, name: str) -> str:
+        raise EnvironmentException(f'{self.id} does not support precompiled headers')
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        raise EnvironmentException(f'{self.id} does not support precompiled headers')
+
+ def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+ raise EnvironmentException(f'{self.id} does not support function attributes')
+
+ def name_string(self) -> str:
+ return ' '.join(self.exelist)
+
+ @abc.abstractmethod
+ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ """Check that this compiler actually works.
+
+        This should provide a simple compile/link test. Something as simple as:
+        ```c
+        int main(void) { return 0; }
+        ```
+        is good enough here.
+ """
+
+ def split_shlib_to_parts(self, fname: str) -> T.Tuple[T.Optional[str], str]:
+ return None, fname
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ return []
+
+ def get_std_exe_link_args(self) -> T.List[str]:
+ # TODO: is this a linker property?
+ return []
+
+ def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+ return []
+
+ def depfile_for_object(self, objfile: str) -> str:
+ return objfile + '.' + self.get_depfile_suffix()
+
+ def get_depfile_suffix(self) -> str:
+ raise EnvironmentException(f'{self.id} does not implement get_depfile_suffix')
+
+ def get_no_stdinc_args(self) -> T.List[str]:
+ """Arguments to turn off default inclusion of standard libraries."""
+ return []
+
+ def get_warn_args(self, level: str) -> T.List[str]:
+ return []
+
+ def get_werror_args(self) -> T.List[str]:
+ return []
+
+ @abc.abstractmethod
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ pass
+
+ def get_module_incdir_args(self) -> T.Tuple[str, ...]:
+ raise EnvironmentException(f'{self.id} does not implement get_module_incdir_args')
+
+ def get_module_outdir_args(self, path: str) -> T.List[str]:
+ raise EnvironmentException(f'{self.id} does not implement get_module_outdir_args')
+
+ def module_name_to_filename(self, module_name: str) -> str:
+ raise EnvironmentException(f'{self.id} does not implement module_name_to_filename')
+
+ def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+        """Arguments to pass to the compiler and/or linker for checks.
+
+ The default implementation turns off optimizations.
+
+ Examples of things that go here:
+ - extra arguments for error checking
+ - Arguments required to make the compiler exit with a non-zero status
+ when something is wrong.
+ """
+ return self.get_no_optimization_args()
+
+ def get_no_optimization_args(self) -> T.List[str]:
+ """Arguments to the compiler to turn off all optimizations."""
+ return []
+
+ def build_wrapper_args(self, env: 'Environment',
+ extra_args: T.Union[None, CompilerArgs, T.List[str]],
+ dependencies: T.Optional[T.List['Dependency']],
+ mode: CompileCheckMode = CompileCheckMode.COMPILE) -> CompilerArgs:
+        """Arguments to pass to the build_wrapper helper.
+
+        This generally needs to be set on a per-language basis. It provides
+        a hook for languages to handle dependencies and extra args. The base
+        implementation handles the most common cases, namely adding the
+        check_arguments, unwrapping dependencies, and appending extra args.
+ """
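+        # Sketch of the resulting order: check args, then per-dependency
+        # compile (and, for link checks, link) args, then external env args,
+        # and finally extra_args, which can therefore override everything.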
+ if callable(extra_args):
+ extra_args = extra_args(mode)
+ if extra_args is None:
+ extra_args = []
+ if dependencies is None:
+ dependencies = []
+
+ # Collect compiler arguments
+ args = self.compiler_args(self.get_compiler_check_args(mode))
+ for d in dependencies:
+ # Add compile flags needed by dependencies
+ args += d.get_compile_args()
+ if mode is CompileCheckMode.LINK:
+ # Add link flags needed to find dependencies
+ args += d.get_link_args()
+
+ if mode is CompileCheckMode.COMPILE:
+ # Add DFLAGS from the env
+ args += env.coredata.get_external_args(self.for_machine, self.language)
+ elif mode is CompileCheckMode.LINK:
+ # Add LDFLAGS from the env
+ args += env.coredata.get_external_link_args(self.for_machine, self.language)
+ # extra_args must override all other arguments, so we add them last
+ args += extra_args
+ return args
+
+ @contextlib.contextmanager
+ def _build_wrapper(self, code: str, env: 'Environment',
+ extra_args: T.Union[None, CompilerArgs, T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None,
+ mode: str = 'compile', want_output: bool = False,
+ disable_cache: bool = False,
+                       temp_dir: T.Optional[str] = None) -> T.Iterator[T.Optional[CompileResult]]:
+        """Helper for getting a cached value when possible.
+
+        This method isn't meant to be called externally; it's meant to be
+        wrapped by other methods like compiles() and links().
+ """
+ args = self.build_wrapper_args(env, extra_args, dependencies, CompileCheckMode(mode))
+ if disable_cache or want_output:
+ with self.compile(code, extra_args=args, mode=mode, want_output=want_output, temp_dir=env.scratch_dir) as r:
+ yield r
+ else:
+ with self.cached_compile(code, env.coredata, extra_args=args, mode=mode, temp_dir=env.scratch_dir) as r:
+ yield r
+
+ def compiles(self, code: str, env: 'Environment', *,
+ extra_args: T.Union[None, T.List[str], CompilerArgs] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None,
+ mode: str = 'compile',
+ disable_cache: bool = False) -> T.Tuple[bool, bool]:
+ with self._build_wrapper(code, env, extra_args, dependencies, mode, disable_cache=disable_cache) as p:
+ return p.returncode == 0, p.cached
+
+ def links(self, code: str, env: 'Environment', *,
+ extra_args: T.Union[None, T.List[str], CompilerArgs] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None,
+ mode: str = 'compile',
+ disable_cache: bool = False) -> T.Tuple[bool, bool]:
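+        # Note that the 'mode' parameter is accepted only for signature
+        # compatibility with compiles(); link checks always run with mode='link'.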
+ return self.compiles(code, env, extra_args=extra_args,
+ dependencies=dependencies, mode='link', disable_cache=disable_cache)
+
+ def get_feature_args(self, kwargs: T.Dict[str, T.Any], build_to_src: str) -> T.List[str]:
+ """Used by D for extra language features."""
+        # TODO: using a TypedDict here would improve this
+ raise EnvironmentException(f'{self.id} does not implement get_feature_args')
+
+ def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.List[str]:
+ raise EnvironmentException(f'{self.id} does not know how to do prelinking.')
+
+ def rsp_file_syntax(self) -> 'RSPFileSyntax':
+ """The format of the RSP file that this compiler supports.
+
+ If `self.can_linker_accept_rsp()` returns True, then this needs to
+        If `self.can_linker_accept_rsp()` returns True, then this needs to
+        be implemented.
+ return self.linker.rsp_file_syntax()
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ """Arguments required for a debug build."""
+ return []
+
+
+def get_global_options(lang: str,
+ comp: T.Type[Compiler],
+ for_machine: MachineChoice,
+ env: 'Environment') -> 'KeyedOptionDictType':
+ """Retrieve options that apply to all compilers for a given language."""
+ description = f'Extra arguments passed to the {lang}'
+ argkey = OptionKey('args', lang=lang, machine=for_machine)
+ largkey = argkey.evolve('link_args')
+ envkey = argkey.evolve('env_args')
+
+ comp_key = argkey if argkey in env.options else envkey
+
+ comp_options = env.options.get(comp_key, [])
+ link_options = env.options.get(largkey, [])
+
+ cargs = coredata.UserArrayOption(
+ description + ' compiler',
+ comp_options, split_args=True, user_input=True, allow_dups=True)
+
+ largs = coredata.UserArrayOption(
+ description + ' linker',
+ link_options, split_args=True, user_input=True, allow_dups=True)
+
+ if comp.INVOKES_LINKER and comp_key == envkey:
+ # If the compiler acts as a linker driver, and we're using the
+ # environment variable flags for both the compiler and linker
+ # arguments, then put the compiler flags in the linker flags as well.
+        # This is how autotools works, and the env vars feature is for
+ # autotools compatibility.
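+        # For example, for a compiler that drives its own linker, CXXFLAGS='-O2'
+        # would end up in both the compiler and the linker argument lists here.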
+ largs.extend_value(comp_options)
+
+ opts: 'KeyedOptionDictType' = {argkey: cargs, largkey: largs}
+
+ return opts
diff --git a/meson/mesonbuild/compilers/cpp.py b/meson/mesonbuild/compilers/cpp.py
new file mode 100644
index 000000000..44155d1bd
--- /dev/null
+++ b/meson/mesonbuild/compilers/cpp.py
@@ -0,0 +1,823 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import functools
+import os.path
+import typing as T
+
+from .. import coredata
+from .. import mlog
+from ..mesonlib import MesonException, MachineChoice, version_compare, OptionKey
+
+from .compilers import (
+ gnu_winlibs,
+ msvc_winlibs,
+ Compiler,
+ CompileCheckMode,
+)
+from .c_function_attributes import CXX_FUNC_ATTRIBUTES, C_FUNC_ATTRIBUTES
+from .mixins.clike import CLikeCompiler
+from .mixins.ccrx import CcrxCompiler
+from .mixins.c2000 import C2000Compiler
+from .mixins.arm import ArmCompiler, ArmclangCompiler
+from .mixins.visualstudio import MSVCCompiler, ClangClCompiler
+from .mixins.gnu import GnuCompiler
+from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler
+from .mixins.clang import ClangCompiler
+from .mixins.elbrus import ElbrusCompiler
+from .mixins.pgi import PGICompiler
+from .mixins.emscripten import EmscriptenMixin
+
+if T.TYPE_CHECKING:
+ from ..coredata import KeyedOptionDictType
+ from ..dependencies import Dependency
+ from ..envconfig import MachineInfo
+ from ..environment import Environment
+ from ..linkers import DynamicLinker
+ from ..programs import ExternalProgram
+ from .mixins.clike import CLikeCompiler as CompilerMixinBase
+else:
+ CompilerMixinBase = object
+
+
+def non_msvc_eh_options(eh: str, args: T.List[str]) -> None:
+ if eh == 'none':
+ args.append('-fno-exceptions')
+ elif eh == 's' or eh == 'c':
+        mlog.warning('non-MSVC compilers do not support ' + eh + ' exception handling. '
+                     'You may want to set eh to \'default\'.')
+
+
+class CPPCompiler(CLikeCompiler, Compiler):
+
+ @classmethod
+ def attribute_check_func(cls, name: str) -> str:
+ try:
+ return CXX_FUNC_ATTRIBUTES.get(name, C_FUNC_ATTRIBUTES[name])
+ except KeyError:
+ raise MesonException(f'Unknown function attribute "{name}"')
+
+ language = 'cpp'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ # If a child ObjCPP class has already set it, don't set it ourselves
+ Compiler.__init__(self, exelist, version, for_machine, info,
+ is_cross=is_cross, linker=linker,
+ full_version=full_version)
+ CLikeCompiler.__init__(self, exe_wrapper)
+
+ @staticmethod
+ def get_display_language() -> str:
+ return 'C++'
+
+ def get_no_stdinc_args(self) -> T.List[str]:
+ return ['-nostdinc++']
+
+ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ code = 'class breakCCompiler;int main(void) { return 0; }\n'
+ return self._sanity_check_impl(work_dir, environment, 'sanitycheckcpp.cc', code)
+
+ def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+ # -fpermissive allows non-conforming code to compile which is necessary
+ # for many C++ checks. Particularly, the has_header_symbol check is
+ # too strict without this and always fails.
+ return super().get_compiler_check_args(mode) + ['-fpermissive']
+
+ def has_header_symbol(self, hname: str, symbol: str, prefix: str,
+ env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ # Check if it's a C-like symbol
+ found, cached = super().has_header_symbol(hname, symbol, prefix, env,
+ extra_args=extra_args,
+ dependencies=dependencies)
+ if found:
+ return True, cached
+ # Check if it's a class or a template
+ if extra_args is None:
+ extra_args = []
+ t = f'''{prefix}
+ #include <{hname}>
+ using {symbol};
+ int main(void) {{ return 0; }}'''
+ return self.compiles(t, env, extra_args=extra_args,
+ dependencies=dependencies)
+
+ def _test_cpp_std_arg(self, cpp_std_value: str) -> bool:
+ # Test whether the compiler understands a -std=XY argument
+        assert cpp_std_value.startswith('-std=')
+
+ # This test does not use has_multi_arguments() for two reasons:
+ # 1. has_multi_arguments() requires an env argument, which the compiler
+ # object does not have at this point.
+ # 2. even if it did have an env object, that might contain another more
+ # recent -std= argument, which might lead to a cascaded failure.
+ CPP_TEST = 'int i = static_cast<int>(0);'
+ with self.compile(CPP_TEST, extra_args=[cpp_std_value], mode='compile') as p:
+ if p.returncode == 0:
+ mlog.debug(f'Compiler accepts {cpp_std_value}:', 'YES')
+ return True
+ else:
+ mlog.debug(f'Compiler accepts {cpp_std_value}:', 'NO')
+ return False
+
+ @functools.lru_cache()
+ def _find_best_cpp_std(self, cpp_std: str) -> str:
+        # The initial version-mapping approach for falling back
+        # from '-std=c++14' to '-std=c++1y' was too brittle. For instance,
+        # Apple's Clang uses a different versioning scheme from upstream LLVM,
+        # which made the detection logic hopelessly unreliable. Instead, let's
+        # just see if feeding GCC or Clang our '-std=' setting works, and
+        # if not, try the fallback argument.
+ CPP_FALLBACKS = {
+ 'c++11': 'c++0x',
+ 'gnu++11': 'gnu++0x',
+ 'c++14': 'c++1y',
+ 'gnu++14': 'gnu++1y',
+ 'c++17': 'c++1z',
+ 'gnu++17': 'gnu++1z',
+ 'c++20': 'c++2a',
+ 'gnu++20': 'gnu++2a',
+ }
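+        # For example, if the compiler rejects '-std=c++14' we retry the
+        # check with '-std=c++1y' before giving up.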
+
+ # Currently, remapping is only supported for Clang, Elbrus and GCC
+        assert self.id in frozenset(['clang', 'lcc', 'gcc', 'emscripten'])
+
+ if cpp_std not in CPP_FALLBACKS:
+ # 'c++03' and 'c++98' don't have fallback types
+ return '-std=' + cpp_std
+
+ for i in (cpp_std, CPP_FALLBACKS[cpp_std]):
+ cpp_std_value = '-std=' + i
+ if self._test_cpp_std_arg(cpp_std_value):
+ return cpp_std_value
+
+ raise MesonException(f'C++ Compiler does not support -std={cpp_std}')
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = super().get_options()
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts.update({
+ key: coredata.UserComboOption(
+ 'C++ language standard to use',
+ ['none'],
+ 'none',
+ ),
+ })
+ return opts
+
+
+class ClangCPPCompiler(ClangCompiler, CPPCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ full_version: T.Optional[str] = None):
+ CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ ClangCompiler.__init__(self, defines)
+ default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CPPCompiler.get_options(self)
+        key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts.update({
+ key.evolve('eh'): coredata.UserComboOption(
+ 'C++ exception handling type.',
+ ['none', 'default', 'a', 's', 'sc'],
+ 'default',
+ ),
+ key.evolve('rtti'): coredata.UserBooleanOption('Enable RTTI', True),
+ })
+        opts[key].choices = [
+ 'none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z',
+ 'c++2a', 'c++20', 'gnu++11', 'gnu++14', 'gnu++17', 'gnu++1z',
+ 'gnu++2a', 'gnu++20',
+ ]
+ if self.info.is_windows() or self.info.is_cygwin():
+ opts.update({
+ key.evolve('winlibs'): coredata.UserArrayOption(
+ 'Standard Win libraries to link against',
+ gnu_winlibs,
+ ),
+ })
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value != 'none':
+ args.append(self._find_best_cpp_std(std.value))
+
+ non_msvc_eh_options(options[key.evolve('eh')].value, args)
+
+ if not options[key.evolve('rtti')].value:
+ args.append('-fno-rtti')
+
+ return args
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ if self.info.is_windows() or self.info.is_cygwin():
+            # Without a TypedDict, mypy can't understand this.
+ key = OptionKey('winlibs', machine=self.for_machine, lang=self.language)
+ libs = options[key].value.copy()
+ assert isinstance(libs, list)
+ for l in libs:
+ assert isinstance(l, str)
+ return libs
+ return []
+
+ def language_stdlib_only_link_flags(self) -> T.List[str]:
+ return ['-lstdc++']
+
+
+class AppleClangCPPCompiler(ClangCPPCompiler):
+ def language_stdlib_only_link_flags(self) -> T.List[str]:
+ return ['-lc++']
+
+
+class EmscriptenCPPCompiler(EmscriptenMixin, ClangCPPCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ full_version: T.Optional[str] = None):
+ if not is_cross:
+ raise MesonException('Emscripten compiler can only be used for cross compilation.')
+ ClangCPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper=exe_wrapper, linker=linker,
+ defines=defines, full_version=full_version)
+ self.id = 'emscripten'
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value != 'none':
+ args.append(self._find_best_cpp_std(std.value))
+ return args
+
+
+class ArmclangCPPCompiler(ArmclangCompiler, CPPCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ ArmclangCompiler.__init__(self)
+ default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CPPCompiler.get_options(self)
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts.update({
+ key.evolve('eh'): coredata.UserComboOption(
+ 'C++ exception handling type.',
+ ['none', 'default', 'a', 's', 'sc'],
+ 'default',
+ ),
+ })
+ opts[key].choices = [
+ 'none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'gnu++98',
+ 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17',
+ ]
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value != 'none':
+ args.append('-std=' + std.value)
+
+ non_msvc_eh_options(options[key.evolve('eh')].value, args)
+
+ return args
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return []
+
+
+class GnuCPPCompiler(GnuCompiler, CPPCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ full_version: T.Optional[str] = None):
+ CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ GnuCompiler.__init__(self, defines)
+ default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts = CPPCompiler.get_options(self)
+ opts.update({
+ key.evolve('eh'): coredata.UserComboOption(
+ 'C++ exception handling type.',
+ ['none', 'default', 'a', 's', 'sc'],
+ 'default',
+ ),
+ key.evolve('rtti'): coredata.UserBooleanOption('Enable RTTI', True),
+ key.evolve('debugstl'): coredata.UserBooleanOption(
+ 'STL debug mode',
+ False,
+ )
+ })
+ opts[key].choices = [
+ 'none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z',
+ 'c++2a', 'c++20', 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17',
+ 'gnu++1z', 'gnu++2a', 'gnu++20',
+ ]
+ if self.info.is_windows() or self.info.is_cygwin():
+ opts.update({
+ key.evolve('winlibs'): coredata.UserArrayOption(
+ 'Standard Win libraries to link against',
+ gnu_winlibs,
+ ),
+ })
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value != 'none':
+ args.append(self._find_best_cpp_std(std.value))
+
+ non_msvc_eh_options(options[key.evolve('eh')].value, args)
+
+ if not options[key.evolve('rtti')].value:
+ args.append('-fno-rtti')
+
+ if options[key.evolve('debugstl')].value:
+ args.append('-D_GLIBCXX_DEBUG=1')
+ return args
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ if self.info.is_windows() or self.info.is_cygwin():
+            # Without a TypedDict, mypy can't understand this.
+ key = OptionKey('winlibs', machine=self.for_machine, lang=self.language)
+ libs = options[key].value.copy()
+ assert isinstance(libs, list)
+ for l in libs:
+ assert isinstance(l, str)
+ return libs
+ return []
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ return ['-fpch-preprocess', '-include', os.path.basename(header)]
+
+ def language_stdlib_only_link_flags(self) -> T.List[str]:
+ return ['-lstdc++']
+
+
+class PGICPPCompiler(PGICompiler, CPPCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ PGICompiler.__init__(self)
+
+
+class NvidiaHPC_CPPCompiler(PGICompiler, CPPCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ PGICompiler.__init__(self)
+
+ self.id = 'nvidia_hpc'
+
+
+class ElbrusCPPCompiler(GnuCPPCompiler, ElbrusCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ full_version: T.Optional[str] = None):
+ GnuCPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker,
+ full_version=full_version, defines=defines)
+ ElbrusCompiler.__init__(self)
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CPPCompiler.get_options(self)
+
+ cpp_stds = [
+ 'none', 'c++98', 'c++03', 'c++0x', 'c++11', 'c++14', 'c++1y',
+ 'gnu++98', 'gnu++03', 'gnu++0x', 'gnu++11', 'gnu++14', 'gnu++1y',
+ ]
+
+ if version_compare(self.version, '>=1.24.00'):
+            cpp_stds += ['c++1z', 'c++17', 'gnu++1z', 'gnu++17']
+
+ if version_compare(self.version, '>=1.25.00'):
+            cpp_stds += ['c++2a', 'gnu++2a']
+
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts.update({
+ key.evolve('eh'): coredata.UserComboOption(
+ 'C++ exception handling type.',
+ ['none', 'default', 'a', 's', 'sc'],
+ 'default',
+ ),
+ key.evolve('debugstl'): coredata.UserBooleanOption(
+ 'STL debug mode',
+ False,
+ ),
+ })
+ opts[key].choices = cpp_stds
+ return opts
+
+    # The Elbrus C++ compiler does not have lchmod, but using it only produces
+    # a linker warning, not a compiler error, so we must fail the check explicitly.
+ def has_function(self, funcname: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ if funcname == 'lchmod':
+ return False, False
+ else:
+ return super().has_function(funcname, prefix, env,
+ extra_args=extra_args,
+ dependencies=dependencies)
+
+ # Elbrus C++ compiler does not support RTTI, so don't check for it.
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value != 'none':
+ args.append(self._find_best_cpp_std(std.value))
+
+ non_msvc_eh_options(options[key.evolve('eh')].value, args)
+
+ if options[key.evolve('debugstl')].value:
+ args.append('-D_GLIBCXX_DEBUG=1')
+ return args
+
+
+class IntelCPPCompiler(IntelGnuLikeCompiler, CPPCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ IntelGnuLikeCompiler.__init__(self)
+ self.lang_header = 'c++-header'
+ default_warn_args = ['-Wall', '-w3', '-diag-disable:remark',
+ '-Wpch-messages', '-Wnon-virtual-dtor']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra']}
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CPPCompiler.get_options(self)
+ # Every Unix compiler under the sun seems to accept -std=c++03,
+ # with the exception of ICC. Instead of preventing the user from
+        # globally requesting C++03, we transparently remap it to C++98.
+ c_stds = ['c++98', 'c++03']
+ g_stds = ['gnu++98', 'gnu++03']
+ if version_compare(self.version, '>=15.0.0'):
+ c_stds += ['c++11', 'c++14']
+ g_stds += ['gnu++11']
+ if version_compare(self.version, '>=16.0.0'):
+ c_stds += ['c++17']
+ if version_compare(self.version, '>=17.0.0'):
+ g_stds += ['gnu++14']
+ if version_compare(self.version, '>=19.1.0'):
+ c_stds += ['c++2a']
+ g_stds += ['gnu++2a']
+
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts.update({
+ key.evolve('eh'): coredata.UserComboOption(
+ 'C++ exception handling type.',
+ ['none', 'default', 'a', 's', 'sc'],
+ 'default',
+ ),
+ key.evolve('rtti'): coredata.UserBooleanOption('Enable RTTI', True),
+ key.evolve('debugstl'): coredata.UserBooleanOption('STL debug mode', False),
+ })
+ opts[key].choices = ['none'] + c_stds + g_stds
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value != 'none':
+ remap_cpp03 = {
+ 'c++03': 'c++98',
+ 'gnu++03': 'gnu++98'
+ }
+ args.append('-std=' + remap_cpp03.get(std.value, std.value))
+ if options[key.evolve('eh')].value == 'none':
+ args.append('-fno-exceptions')
+ if not options[key.evolve('rtti')].value:
+ args.append('-fno-rtti')
+ if options[key.evolve('debugstl')].value:
+ args.append('-D_GLIBCXX_DEBUG=1')
+ return args
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return []
+
+
+class VisualStudioLikeCPPCompilerMixin(CompilerMixinBase):
+
+ """Mixin for C++ specific method overrides in MSVC-like compilers."""
+
+ VC_VERSION_MAP = {
+ 'none': (True, None),
+ 'vc++11': (True, 11),
+ 'vc++14': (True, 14),
+ 'vc++17': (True, 17),
+ 'vc++latest': (True, "latest"),
+ 'c++11': (False, 11),
+ 'c++14': (False, 14),
+ 'c++17': (False, 17),
+ 'c++latest': (False, "latest"),
+ }
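+    # Illustrative use of the map above: 'c++17' yields '/std:c++17' plus
+    # '/permissive-', while 'vc++17' yields only '/std:c++17' (permissive
+    # mode left at the compiler default). See get_option_compile_args below.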
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        # Needs a TypedDict for this.
+ key = OptionKey('winlibs', machine=self.for_machine, lang=self.language)
+ return T.cast(T.List[str], options[key].value[:])
+
+ def _get_options_impl(self, opts: 'KeyedOptionDictType', cpp_stds: T.List[str]) -> 'KeyedOptionDictType':
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts.update({
+ key.evolve('eh'): coredata.UserComboOption(
+ 'C++ exception handling type.',
+ ['none', 'default', 'a', 's', 'sc'],
+ 'default',
+ ),
+ key.evolve('rtti'): coredata.UserBooleanOption('Enable RTTI', True),
+ key.evolve('winlibs'): coredata.UserArrayOption(
+ 'Windows libs to link against.',
+ msvc_winlibs,
+ ),
+ })
+ opts[key.evolve('std')].choices = cpp_stds
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+
+ eh = options[key.evolve('eh')]
+ if eh.value == 'default':
+ args.append('/EHsc')
+ elif eh.value == 'none':
+ args.append('/EHs-c-')
+ else:
+ args.append('/EH' + eh.value)
+
+ if not options[key.evolve('rtti')].value:
+ args.append('/GR-')
+
+ permissive, ver = self.VC_VERSION_MAP[options[key].value]
+
+ if ver is not None:
+ args.append(f'/std:c++{ver}')
+
+ if not permissive:
+ args.append('/permissive-')
+
+ return args
+
+ def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+ # XXX: this is a hack because so much GnuLike stuff is in the base CPPCompiler class.
+ return Compiler.get_compiler_check_args(self, mode)
+
+
+class CPP11AsCPP14Mixin(CompilerMixinBase):
+
+ """Mixin class for VisualStudio and ClangCl to replace C++11 std with C++14.
+
+ This is a limitation of Clang and MSVC that ICL doesn't share.
+ """
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        # Note: there is no explicit flag for supporting C++11; we attempt to
+        # do the best we can, which means setting the C++ standard version to
+        # C++14 in compilers that support it (i.e., after VS2015U3). If one is
+        # using anything before that point, one cannot set the standard.
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ if options[key].value in {'vc++11', 'c++11'}:
+ mlog.warning(self.id, 'does not support C++11;',
+ 'attempting best effort; setting the standard to C++14', once=True)
+ # Don't mutate anything we're going to change, we need to use
+ # deepcopy since we're messing with members, and we can't simply
+ # copy the members because the option proxy doesn't support it.
+ options = copy.deepcopy(options)
+ if options[key].value == 'vc++11':
+ options[key].value = 'vc++14'
+ else:
+ options[key].value = 'c++14'
+ return super().get_option_compile_args(options)
+
+
+class VisualStudioCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixin, MSVCCompiler, CPPCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo', target: str,
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ MSVCCompiler.__init__(self, target)
+ self.id = 'msvc'
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ cpp_stds = ['none', 'c++11', 'vc++11']
+ # Visual Studio 2015 and later
+ if version_compare(self.version, '>=19'):
+ cpp_stds.extend(['c++14', 'c++latest', 'vc++latest'])
+ # Visual Studio 2017 and later
+ if version_compare(self.version, '>=19.11'):
+ cpp_stds.extend(['vc++14', 'c++17', 'vc++17'])
+ return self._get_options_impl(super().get_options(), cpp_stds)
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ if options[key].value != 'none' and version_compare(self.version, '<19.00.24210'):
+ mlog.warning('This version of MSVC does not support cpp_std arguments')
+            options = copy.deepcopy(options)
+ options[key].value = 'none'
+
+ args = super().get_option_compile_args(options)
+
+ if version_compare(self.version, '<19.11'):
+ try:
+ i = args.index('/permissive-')
+ except ValueError:
+ return args
+ del args[i]
+ return args
+
+
+class ClangClCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixin, ClangClCompiler, CPPCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo', target: str,
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ ClangClCompiler.__init__(self, target)
+ self.id = 'clang-cl'
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ cpp_stds = ['none', 'c++11', 'vc++11', 'c++14', 'vc++14', 'c++17', 'vc++17', 'c++latest']
+ return self._get_options_impl(super().get_options(), cpp_stds)
+
+
+class IntelClCPPCompiler(VisualStudioLikeCPPCompilerMixin, IntelVisualStudioLikeCompiler, CPPCompiler):
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo', target: str,
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ IntelVisualStudioLikeCompiler.__init__(self, target)
+
+ def get_options(self) -> 'KeyedOptionDictType':
+        # This has only been tested with version 19.0.
+ cpp_stds = ['none', 'c++11', 'vc++11', 'c++14', 'vc++14', 'c++17', 'vc++17', 'c++latest']
+ return self._get_options_impl(super().get_options(), cpp_stds)
+
+ def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+ # XXX: this is a hack because so much GnuLike stuff is in the base CPPCompiler class.
+ return IntelVisualStudioLikeCompiler.get_compiler_check_args(self, mode)
+
+
+class ArmCPPCompiler(ArmCompiler, CPPCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ ArmCompiler.__init__(self)
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CPPCompiler.get_options(self)
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none', 'c++03', 'c++11']
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value == 'c++11':
+ args.append('--cpp11')
+ elif std.value == 'c++03':
+ args.append('--cpp')
+ return args
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return []
+
+ def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+ return []
+
+
+class CcrxCPPCompiler(CcrxCompiler, CPPCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ CcrxCompiler.__init__(self)
+
+    # Override the inherited get_always_args
+ def get_always_args(self) -> T.List[str]:
+ return ['-nologo', '-lang=cpp']
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return []
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return []
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return ['-output=obj=%s' % target]
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return []
+
+ def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+ return []
+
+
+class C2000CPPCompiler(C2000Compiler, CPPCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ C2000Compiler.__init__(self)
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = CPPCompiler.get_options(self)
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none', 'c++03']
+ return opts
+
+ def get_always_args(self) -> T.List[str]:
+ return ['-nologo', '-lang=cpp']
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return []
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return []
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return ['-output=obj=%s' % target]
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return []
+
+ def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+ return []
diff --git a/meson/mesonbuild/compilers/cs.py b/meson/mesonbuild/compilers/cs.py
new file mode 100644
index 000000000..7ebb66def
--- /dev/null
+++ b/meson/mesonbuild/compilers/cs.py
@@ -0,0 +1,150 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path, subprocess
+import textwrap
+import typing as T
+
+from ..mesonlib import EnvironmentException
+from ..linkers import RSPFileSyntax
+
+from .compilers import Compiler, MachineChoice, mono_buildtype_args
+from .mixins.islinker import BasicLinkerIsCompilerMixin
+
+if T.TYPE_CHECKING:
+ from ..envconfig import MachineInfo
+ from ..environment import Environment
+
+cs_optimization_args = {'0': [],
+ 'g': [],
+ '1': ['-optimize+'],
+ '2': ['-optimize+'],
+ '3': ['-optimize+'],
+ 's': ['-optimize+'],
+ } # type: T.Dict[str, T.List[str]]
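+# The C# compilers expose only a single on/off optimize switch, so every
+# non-zero level above maps to the same '-optimize+' flag.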
+
+
+class CsCompiler(BasicLinkerIsCompilerMixin, Compiler):
+
+ language = 'cs'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ info: 'MachineInfo', comp_id: str, runner: T.Optional[str] = None):
+ super().__init__(exelist, version, for_machine, info)
+ self.id = comp_id
+ self.runner = runner
+
+ @classmethod
+ def get_display_language(cls) -> str:
+ return 'C sharp'
+
+ def get_always_args(self) -> T.List[str]:
+ return ['/nologo']
+
+ def get_linker_always_args(self) -> T.List[str]:
+ return ['/nologo']
+
+ def get_output_args(self, fname: str) -> T.List[str]:
+ return ['-out:' + fname]
+
+ def get_link_args(self, fname: str) -> T.List[str]:
+ return ['-r:' + fname]
+
+ def get_werror_args(self) -> T.List[str]:
+ return ['-warnaserror']
+
+ def get_pic_args(self) -> T.List[str]:
+ return []
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+ build_dir: str) -> T.List[str]:
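+        # For example, with build_dir='/path/to/build' (illustrative values):
+        #   '-Lsub'     -> '-L/path/to/build/sub'
+        #   '-lib:sub'  -> '-lib:/path/to/build/sub'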
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+ if i[:5] == '-lib:':
+ parameter_list[idx] = i[:5] + os.path.normpath(os.path.join(build_dir, i[5:]))
+
+ return parameter_list
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ return []
+
+ def get_pch_name(self, header_name: str) -> str:
+ return ''
+
+ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ src = 'sanity.cs'
+ obj = 'sanity.exe'
+ source_name = os.path.join(work_dir, src)
+ with open(source_name, 'w', encoding='utf-8') as ofile:
+ ofile.write(textwrap.dedent('''
+ public class Sanity {
+ static public void Main () {
+ }
+ }
+ '''))
+ pc = subprocess.Popen(self.exelist + self.get_always_args() + [src], cwd=work_dir)
+ pc.wait()
+ if pc.returncode != 0:
+            raise EnvironmentException('C# compiler %s cannot compile programs.' % self.name_string())
+ if self.runner:
+ cmdlist = [self.runner, obj]
+ else:
+ cmdlist = [os.path.join(work_dir, obj)]
+ pe = subprocess.Popen(cmdlist, cwd=work_dir)
+ pe.wait()
+ if pe.returncode != 0:
+ raise EnvironmentException('Executables created by Mono compiler %s are not runnable.' % self.name_string())
+
+ def needs_static_linker(self) -> bool:
+ return False
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return mono_buildtype_args[buildtype]
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return ['-debug'] if is_debug else []
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return cs_optimization_args[optimization_level]
+
+
+class MonoCompiler(CsCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ info: 'MachineInfo'):
+ super().__init__(exelist, version, for_machine, info, 'mono',
+ runner='mono')
+
+ def rsp_file_syntax(self) -> 'RSPFileSyntax':
+ return RSPFileSyntax.GCC
+
+
+class VisualStudioCsCompiler(CsCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ info: 'MachineInfo'):
+ super().__init__(exelist, version, for_machine, info, 'csc')
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ res = mono_buildtype_args[buildtype]
+ if not self.info.is_windows():
+ tmp = []
+ for flag in res:
+ if flag == '-debug':
+ flag = '-debug:portable'
+ tmp.append(flag)
+ res = tmp
+ return res
+
+ def rsp_file_syntax(self) -> 'RSPFileSyntax':
+ return RSPFileSyntax.MSVC
diff --git a/meson/mesonbuild/compilers/cuda.py b/meson/mesonbuild/compilers/cuda.py
new file mode 100644
index 000000000..36da833be
--- /dev/null
+++ b/meson/mesonbuild/compilers/cuda.py
@@ -0,0 +1,760 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import enum
+import os.path
+import string
+import typing as T
+
+from .. import coredata
+from .. import mlog
+from ..mesonlib import (
+ EnvironmentException, MachineChoice, Popen_safe, OptionOverrideProxy,
+ is_windows, LibType, OptionKey,
+)
+from .compilers import (Compiler, cuda_buildtype_args, cuda_optimization_args,
+ cuda_debug_args)
+
+if T.TYPE_CHECKING:
+ from ..build import BuildTarget
+ from ..coredata import KeyedOptionDictType
+ from ..dependencies import Dependency
+ from ..environment import Environment # noqa: F401
+ from ..envconfig import MachineInfo
+ from ..linkers import DynamicLinker
+ from ..programs import ExternalProgram
+
+
+class _Phase(enum.Enum):
+
+ COMPILER = 'compiler'
+ LINKER = 'linker'
+
+
+class CudaCompiler(Compiler):
+
+ LINKER_PREFIX = '-Xlinker='
+ language = 'cuda'
+
+ # NVCC flags taking no arguments.
+ _FLAG_PASSTHRU_NOARGS = {
+ # NVCC --long-option, NVCC -short-option CUDA Toolkit 11.2.1 Reference
+ '--objdir-as-tempdir', '-objtemp', # 4.2.1.2
+ '--generate-dependency-targets', '-MP', # 4.2.1.12
+ '--allow-unsupported-compiler', '-allow-unsupported-compiler', # 4.2.1.14
+ '--link', # 4.2.2.1
+ '--lib', '-lib', # 4.2.2.2
+ '--device-link', '-dlink', # 4.2.2.3
+ '--device-c', '-dc', # 4.2.2.4
+ '--device-w', '-dw', # 4.2.2.5
+ '--cuda', '-cuda', # 4.2.2.6
+ '--compile', '-c', # 4.2.2.7
+ '--fatbin', '-fatbin', # 4.2.2.8
+ '--cubin', '-cubin', # 4.2.2.9
+ '--ptx', '-ptx', # 4.2.2.10
+ '--preprocess', '-E', # 4.2.2.11
+ '--generate-dependencies', '-M', # 4.2.2.12
+ '--generate-nonsystem-dependencies', '-MM', # 4.2.2.13
+ '--generate-dependencies-with-compile', '-MD', # 4.2.2.14
+ '--generate-nonsystem-dependencies-with-compile', '-MMD', # 4.2.2.15
+ '--run', # 4.2.2.16
+ '--profile', '-pg', # 4.2.3.1
+ '--debug', '-g', # 4.2.3.2
+ '--device-debug', '-G', # 4.2.3.3
+ '--extensible-whole-program', '-ewp', # 4.2.3.4
+ '--generate-line-info', '-lineinfo', # 4.2.3.5
+ '--dlink-time-opt', '-dlto', # 4.2.3.8
+ '--no-exceptions', '-noeh', # 4.2.3.11
+ '--shared', '-shared', # 4.2.3.12
+ '--no-host-device-initializer-list', '-nohdinitlist', # 4.2.3.15
+ '--expt-relaxed-constexpr', '-expt-relaxed-constexpr', # 4.2.3.16
+ '--extended-lambda', '-extended-lambda', # 4.2.3.17
+ '--expt-extended-lambda', '-expt-extended-lambda', # 4.2.3.18
+ '--m32', '-m32', # 4.2.3.20
+ '--m64', '-m64', # 4.2.3.21
+ '--forward-unknown-to-host-compiler', '-forward-unknown-to-host-compiler', # 4.2.5.1
+ '--forward-unknown-to-host-linker', '-forward-unknown-to-host-linker', # 4.2.5.2
+ '--dont-use-profile', '-noprof', # 4.2.5.3
+ '--dryrun', '-dryrun', # 4.2.5.5
+ '--verbose', '-v', # 4.2.5.6
+ '--keep', '-keep', # 4.2.5.7
+ '--save-temps', '-save-temps', # 4.2.5.9
+ '--clean-targets', '-clean', # 4.2.5.10
+ '--no-align-double', # 4.2.5.16
+ '--no-device-link', '-nodlink', # 4.2.5.17
+ '--allow-unsupported-compiler', '-allow-unsupported-compiler', # 4.2.5.18
+ '--use_fast_math', '-use_fast_math', # 4.2.7.7
+ '--extra-device-vectorization', '-extra-device-vectorization', # 4.2.7.12
+ '--compile-as-tools-patch', '-astoolspatch', # 4.2.7.13
+ '--keep-device-functions', '-keep-device-functions', # 4.2.7.14
+ '--disable-warnings', '-w', # 4.2.8.1
+ '--source-in-ptx', '-src-in-ptx', # 4.2.8.2
+ '--restrict', '-restrict', # 4.2.8.3
+ '--Wno-deprecated-gpu-targets', '-Wno-deprecated-gpu-targets', # 4.2.8.4
+ '--Wno-deprecated-declarations', '-Wno-deprecated-declarations', # 4.2.8.5
+ '--Wreorder', '-Wreorder', # 4.2.8.6
+ '--Wdefault-stream-launch', '-Wdefault-stream-launch', # 4.2.8.7
+ '--Wext-lambda-captures-this', '-Wext-lambda-captures-this', # 4.2.8.8
+ '--display-error-number', '-err-no', # 4.2.8.10
+ '--resource-usage', '-res-usage', # 4.2.8.14
+ '--help', '-h', # 4.2.8.15
+ '--version', '-V', # 4.2.8.16
+ '--list-gpu-code', '-code-ls', # 4.2.8.20
+ '--list-gpu-arch', '-arch-ls', # 4.2.8.21
+ }
+ # Dictionary of NVCC flags taking either one argument or a comma-separated list.
+ # Maps --long to -short options, because the short options are more GCC-like.
+ _FLAG_LONG2SHORT_WITHARGS = {
+ '--output-file': '-o', # 4.2.1.1
+ '--pre-include': '-include', # 4.2.1.3
+ '--library': '-l', # 4.2.1.4
+ '--define-macro': '-D', # 4.2.1.5
+ '--undefine-macro': '-U', # 4.2.1.6
+ '--include-path': '-I', # 4.2.1.7
+ '--system-include': '-isystem', # 4.2.1.8
+ '--library-path': '-L', # 4.2.1.9
+ '--output-directory': '-odir', # 4.2.1.10
+ '--dependency-output': '-MF', # 4.2.1.11
+ '--compiler-bindir': '-ccbin', # 4.2.1.13
+ '--archiver-binary': '-arbin', # 4.2.1.15
+ '--cudart': '-cudart', # 4.2.1.16
+ '--cudadevrt': '-cudadevrt', # 4.2.1.17
+ '--libdevice-directory': '-ldir', # 4.2.1.18
+ '--target-directory': '-target-dir', # 4.2.1.19
+ '--optimization-info': '-opt-info', # 4.2.3.6
+ '--optimize': '-O', # 4.2.3.7
+ '--ftemplate-backtrace-limit': '-ftemplate-backtrace-limit', # 4.2.3.9
+ '--ftemplate-depth': '-ftemplate-depth', # 4.2.3.10
+ '--x': '-x', # 4.2.3.13
+ '--std': '-std', # 4.2.3.14
+ '--machine': '-m', # 4.2.3.19
+ '--compiler-options': '-Xcompiler', # 4.2.4.1
+ '--linker-options': '-Xlinker', # 4.2.4.2
+ '--archive-options': '-Xarchive', # 4.2.4.3
+ '--ptxas-options': '-Xptxas', # 4.2.4.4
+ '--nvlink-options': '-Xnvlink', # 4.2.4.5
+ '--threads': '-t', # 4.2.5.4
+ '--keep-dir': '-keep-dir', # 4.2.5.8
+ '--run-args': '-run-args', # 4.2.5.11
+ '--input-drive-prefix': '-idp', # 4.2.5.12
+ '--dependency-drive-prefix': '-ddp', # 4.2.5.13
+ '--drive-prefix': '-dp', # 4.2.5.14
+ '--dependency-target-name': '-MT', # 4.2.5.15
+ '--default-stream': '-default-stream', # 4.2.6.1
+ '--gpu-architecture': '-arch', # 4.2.7.1
+ '--gpu-code': '-code', # 4.2.7.2
+ '--generate-code': '-gencode', # 4.2.7.3
+ '--relocatable-device-code': '-rdc', # 4.2.7.4
+ '--entries': '-e', # 4.2.7.5
+ '--maxrregcount': '-maxrregcount', # 4.2.7.6
+ '--ftz': '-ftz', # 4.2.7.8
+ '--prec-div': '-prec-div', # 4.2.7.9
+ '--prec-sqrt': '-prec-sqrt', # 4.2.7.10
+ '--fmad': '-fmad', # 4.2.7.11
+ '--Werror': '-Werror', # 4.2.8.9
+ '--diag-error': '-diag-error', # 4.2.8.11
+ '--diag-suppress': '-diag-suppress', # 4.2.8.12
+ '--diag-warn': '-diag-warn', # 4.2.8.13
+ '--options-file': '-optf', # 4.2.8.17
+ '--time': '-time', # 4.2.8.18
+ '--qpp-config': '-qpp-config', # 4.2.8.19
+ }
+ # Reverse map -short to --long options.
+    _FLAG_SHORT2LONG_WITHARGS = {v: k for k, v in _FLAG_LONG2SHORT_WITHARGS.items()}
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, exe_wrapper: T.Optional['ExternalProgram'],
+ host_compiler: Compiler, info: 'MachineInfo',
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ super().__init__(exelist, version, for_machine, info, linker=linker, full_version=full_version, is_cross=is_cross)
+ self.exe_wrapper = exe_wrapper
+ self.host_compiler = host_compiler
+ self.base_options = host_compiler.base_options
+ self.id = 'nvcc'
+ self.warn_args = {level: self._to_host_flags(flags) for level, flags in host_compiler.warn_args.items()}
+
+ @classmethod
+ def _shield_nvcc_list_arg(cls, arg: str, listmode: bool=True) -> str:
+ r"""
+ Shield an argument against both splitting by NVCC's list-argument
+ parse logic, and interpretation by any shell.
+
+        NVCC seems to consider every comma that is neither escaped by \ nor
+        inside a double-quoted string to be a split point. Single quotes do not
+        protect against splitting; in fact, after splitting they are \-escaped.
+        Unfortunately, double quotes don't protect against shell expansion.
+        What follows is a complex dance to accommodate everybody.
+ """
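+        # A sketch of the intended behaviour:
+        #   'foo,bar' -> 'foo\,bar'   (escape NVCC's list-splitting commas)
+        #   'a b'     -> "'a b'"      (single-quote shell-sensitive strings)
+        #   'plain'   -> 'plain'      (returned untouched)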
+
+ SQ = "'"
+ DQ = '"'
+ CM = ","
+ BS = "\\"
+ DQSQ = DQ+SQ+DQ
+ quotable = set(string.whitespace+'"$`\\')
+
+ if CM not in arg or not listmode:
+ if SQ not in arg:
+ # If any of the special characters "$`\ or whitespace are present, single-quote.
+ # Otherwise return bare.
+ if set(arg).intersection(quotable):
+ return SQ+arg+SQ
+ else:
+ return arg # Easy case: no splits, no quoting.
+ else:
+ # There are single quotes. Double-quote them, and single-quote the
+ # strings between them.
+ l = [cls._shield_nvcc_list_arg(s) for s in arg.split(SQ)]
+                l = sum([[s, DQSQ] for s in l], [])[:-1]  # Interleave l with DQSQs
+ return ''.join(l)
+ else:
+ # A comma is present, and list mode was active.
+ # We apply (what we guess is) the (primitive) NVCC splitting rule:
+ l = ['']
+ instring = False
+ argit = iter(arg)
+ for c in argit:
+ if c == CM and not instring:
+ l.append('')
+ elif c == DQ:
+ l[-1] += c
+ instring = not instring
+ elif c == BS:
+ try:
+ l[-1] += next(argit)
+ except StopIteration:
+ break
+ else:
+ l[-1] += c
+
+ # Shield individual strings, without listmode, then return them with
+ # escaped commas between them.
+ l = [cls._shield_nvcc_list_arg(s, listmode=False) for s in l]
+ return r'\,'.join(l)
+
+ @classmethod
+ def _merge_flags(cls, flags: T.List[str]) -> T.List[str]:
+ r"""
+ The flags to NVCC get exceedingly verbose and unreadable when too many of them
+ are shielded with -Xcompiler. Merge consecutive -Xcompiler-wrapped arguments
+ into one.
+ """
+ if len(flags) <= 1:
+ return flags
+ flagit = iter(flags)
+ xflags = []
+
+ def is_xcompiler_flag_isolated(flag: str) -> bool:
+ return flag == '-Xcompiler'
+ def is_xcompiler_flag_glued(flag: str) -> bool:
+ return flag.startswith('-Xcompiler=')
+ def is_xcompiler_flag(flag: str) -> bool:
+ return is_xcompiler_flag_isolated(flag) or is_xcompiler_flag_glued(flag)
+ def get_xcompiler_val(flag: str, flagit: T.Iterator[str]) -> str:
+ if is_xcompiler_flag_glued(flag):
+ return flag[len('-Xcompiler='):]
+ else:
+ try:
+ return next(flagit)
+ except StopIteration:
+ return ""
+
+ ingroup = False
+ for flag in flagit:
+ if not is_xcompiler_flag(flag):
+ ingroup = False
+ xflags.append(flag)
+ elif ingroup:
+ xflags[-1] += ','
+ xflags[-1] += get_xcompiler_val(flag, flagit)
+ elif is_xcompiler_flag_isolated(flag):
+ ingroup = True
+ xflags.append(flag)
+ xflags.append(get_xcompiler_val(flag, flagit))
+ elif is_xcompiler_flag_glued(flag):
+ ingroup = True
+ xflags.append(flag)
+ else:
+ raise ValueError("-Xcompiler flag merging failed, unknown argument form!")
+ return xflags
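As an illustrative aside, a hand-traced before/after for `_merge_flags` (assuming the class is reachable as `CudaCompiler`): consecutive `-Xcompiler` arguments collapse into a single comma-joined group, and any unrelated flag terminates the group.

```python
# Three shielded host flags; the unrelated -O2 ends the -Xcompiler group.
flags = ['-Xcompiler', '-Wall', '-Xcompiler=-Wextra', '-O2']
assert CudaCompiler._merge_flags(flags) == ['-Xcompiler', '-Wall,-Wextra', '-O2']
```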
+
+ @classmethod
+ def _to_host_flags(cls, flags: T.List[str], phase: _Phase = _Phase.COMPILER) -> T.List[str]:
+ """
+ Translate generic "GCC-speak" plus particular "NVCC-speak" flags to NVCC flags.
+
+ NVCC's "short" flags have broad similarities to the GCC standard, but have
+ gratuitous, irritating differences.
+ """
+
+ xflags = []
+ flagit = iter(flags)
+
+ for flag in flagit:
+ # The CUDA Toolkit Documentation, in 4.1. Command Option Types and Notation,
+ # specifies that NVCC does not parse the standard flags as GCC does. It has
+ # its own strategy, to wit:
+ #
+ # nvcc recognizes three types of command options: boolean options, single
+ # value options, and list options.
+ #
+ # Boolean options do not have an argument; they are either specified on a
+ # command line or not. Single value options must be specified at most once,
+ # and list options may be repeated. Examples of each of these option types
+ # are, respectively: --verbose (switch to verbose mode), --output-file
+ # (specify output file), and --include-path (specify include path).
+ #
+ # Single value options and list options must have arguments, which must
+ # follow the name of the option itself by either one or more spaces or an
+ # equals character. When a one-character short name such as -I, -l, and -L
+ # is used, the value of the option may also immediately follow the option
+ # itself without being separated by spaces or an equals character. The
+ # individual values of list options may be separated by commas in a single
+ # instance of the option, or the option may be repeated, or any
+ # combination of these two cases.
+ #
+ # One strange consequence of this choice is that directory and filenames that
+ # contain commas (',') cannot be passed to NVCC (at least, not as easily as
+ # in GCC). Another strange consequence is that it is legal to supply flags
+ # such as
+ #
+ # -lpthread,rt,dl,util
+ # -l pthread,rt,dl,util
+ # -l=pthread,rt,dl,util
+ #
+ # and each of the above alternatives is equivalent to GCC-speak
+ #
+ # -lpthread -lrt -ldl -lutil
+ # -l pthread -l rt -l dl -l util
+ # -l=pthread -l=rt -l=dl -l=util
+ #
+ # *With the exception of commas in the name*, GCC-speak for these list flags
+ # is a strict subset of NVCC-speak, so we passthrough those flags.
+ #
+ # The -D macro-define flag is documented as somehow shielding commas from
+ # splitting a definition. Balanced parentheses, braces and single-quotes
+ # around the comma are not sufficient, but balanced double-quotes are. The
+ # shielding appears to work with -l, -I, -L flags as well, for instance.
+ #
+ # Since our goal is to replicate GCC-speak as much as possible, we check for
+ # commas in all list-arguments and shield them with double-quotes. We make
+ # an exception for -D (where this would be value-changing) and -U (because
+ # it isn't possible to define a macro with a comma in the name).
+
+ if flag in cls._FLAG_PASSTHRU_NOARGS:
+ xflags.append(flag)
+ continue
+
+
+ # Handle breakup of flag-values into a flag-part and value-part.
+ val: T.Optional[str] = None # Reset per flag so a missing argument trips the assert below, not a NameError.
+ if flag[:1] not in '-/':
+ # This is not a flag. It's probably a file input. Pass it through.
+ xflags.append(flag)
+ continue
+ elif flag[:1] == '/':
+ # This is ambiguously either an MSVC-style /switch or an absolute path
+ # to a file. For some magical reason the following works acceptably in
+ # both cases.
+ wrap = '"' if ',' in flag else ''
+ xflags.append(f'-X{phase.value}={wrap}{flag}{wrap}')
+ continue
+ elif len(flag) >= 2 and flag[0] == '-' and flag[1] in 'IDULlmOxmte':
+ # This is a single-letter short option. These options (with the
+ # exception of -o) are allowed to receive their argument with neither
+ # space nor = sign before them. Detect and separate them in that event.
+ if flag[2:3] == '': # -I something
+ try:
+ val = next(flagit)
+ except StopIteration:
+ pass
+ elif flag[2:3] == '=': # -I=something
+ val = flag[3:]
+ else: # -Isomething
+ val = flag[2:]
+ flag = flag[:2] # -I
+ elif flag in cls._FLAG_LONG2SHORT_WITHARGS or \
+ flag in cls._FLAG_SHORT2LONG_WITHARGS:
+ # This is either -o or a multi-letter flag, and it is receiving its
+ # value isolated.
+ try:
+ val = next(flagit) # -o something
+ except StopIteration:
+ pass
+ elif flag.split('=',1)[0] in cls._FLAG_LONG2SHORT_WITHARGS or \
+ flag.split('=',1)[0] in cls._FLAG_SHORT2LONG_WITHARGS:
+ # This is either -o or a multi-letter flag, and it is receiving its
+ # value after an = sign.
+ flag, val = flag.split('=',1) # -o=something
+ else:
+ # This is a flag, and it's foreign to NVCC.
+ #
+ # We do not know whether this GCC-speak flag takes an isolated
+ # argument. Assuming it does not (the vast majority indeed don't),
+ # wrap this argument in an -Xcompiler flag and send it down to NVCC.
+ if flag == '-ffast-math':
+ xflags.append('-use_fast_math')
+ xflags.append('-Xcompiler='+flag)
+ elif flag == '-fno-fast-math':
+ xflags.append('-ftz=false')
+ xflags.append('-prec-div=true')
+ xflags.append('-prec-sqrt=true')
+ xflags.append('-Xcompiler='+flag)
+ elif flag == '-freciprocal-math':
+ xflags.append('-prec-div=false')
+ xflags.append('-Xcompiler='+flag)
+ elif flag == '-fno-reciprocal-math':
+ xflags.append('-prec-div=true')
+ xflags.append('-Xcompiler='+flag)
+ else:
+ xflags.append('-Xcompiler='+cls._shield_nvcc_list_arg(flag))
+ # The above should securely handle GCC's -Wl, -Wa, -Wp, arguments.
+ continue
+
+
+ assert val is not None # Should only trip if there is a missing argument.
+
+
+ # Take care of the various NVCC-supported flags that need special handling.
+ flag = cls._FLAG_LONG2SHORT_WITHARGS.get(flag,flag)
+
+ if flag in {'-include','-isystem','-I','-L','-l'}:
+ # These flags are known to GCC, but list-valued in NVCC. They potentially
+ # require double-quoting to prevent NVCC interpreting the flags as lists
+ # when GCC would not have done so.
+ #
+ # We avoid this quoting for -D (where it would change the macro's value)
+ # and for -U (a macro name cannot contain a comma, so -U with comma
+ # arguments is impossible in GCC-speak and therefore unambiguous, albeit
+ # unportable, in NVCC-speak).
+ if len(flag) == 2:
+ xflags.append(flag+cls._shield_nvcc_list_arg(val))
+ else:
+ xflags.append(flag)
+ xflags.append(cls._shield_nvcc_list_arg(val))
+ elif flag == '-O':
+ # Handle optimization levels GCC knows about that NVCC does not.
+ if val == 'fast':
+ xflags.append('-O3')
+ xflags.append('-use_fast_math')
+ xflags.append('-Xcompiler')
+ xflags.append(flag+val)
+ elif val in {'s', 'g', 'z'}:
+ xflags.append('-Xcompiler')
+ xflags.append(flag+val)
+ else:
+ xflags.append(flag+val)
+ elif flag in {'-D', '-U', '-m', '-t'}:
+ xflags.append(flag+val) # For style, keep glued.
+ elif flag in {'-std'}:
+ xflags.append(flag+'='+val) # For style, keep glued.
+ else:
+ xflags.append(flag)
+ xflags.append(val)
+
+ return cls._merge_flags(xflags)
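Putting the translation rules together, an illustrative hand-traced run (my example, not part of the patch); the values follow from the branches above:

```python
# Hand-traced GCC-speak -> NVCC-speak translation:
CudaCompiler._to_host_flags(['-I/foo,bar', '-Wall', '-std=c++14', '-O2'])
# -> ['-I/foo\,bar',        # list-valued flag: the comma is shielded
#     '-Xcompiler=-Wall',   # foreign flag: wrapped for the host compiler
#     '-std=c++14',         # known flag: kept glued for style
#     '-O2']                # optimization level NVCC understands natively
```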
+
+ def needs_static_linker(self) -> bool:
+ return False
+
+ def thread_link_flags(self, environment: 'Environment') -> T.List[str]:
+ return self._to_host_flags(self.host_compiler.thread_link_flags(environment), _Phase.LINKER)
+
+ def sanity_check(self, work_dir: str, env: 'Environment') -> None:
+ mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', ' '.join(self.exelist))
+ mlog.debug('Is cross compiler: %s.' % str(self.is_cross))
+
+ sname = 'sanitycheckcuda.cu'
+ code = r'''
+ #include <cuda_runtime.h>
+ #include <stdio.h>
+
+ __global__ void kernel (void) {}
+
+ int main(void){
+ struct cudaDeviceProp prop;
+ int count, i;
+ cudaError_t ret = cudaGetDeviceCount(&count);
+ if(ret != cudaSuccess){
+ fprintf(stderr, "%d\n", (int)ret);
+ }else{
+ for(i=0;i<count;i++){
+ if(cudaGetDeviceProperties(&prop, i) == cudaSuccess){
+ fprintf(stdout, "%d.%d\n", prop.major, prop.minor);
+ }
+ }
+ }
+ fflush(stderr);
+ fflush(stdout);
+ return 0;
+ }
+ '''
+ binname = sname.rsplit('.', 1)[0]
+ binname += '_cross' if self.is_cross else ''
+ source_name = os.path.join(work_dir, sname)
+ binary_name = os.path.join(work_dir, binname + '.exe')
+ with open(source_name, 'w', encoding='utf-8') as ofile:
+ ofile.write(code)
+
+ # The Sanity Test for CUDA language will serve as both a sanity test
+ # and a native-build GPU architecture detection test, useful later.
+ #
+ # For this second purpose, NVCC has very handy flags, --run and
+ # --run-args, that allow one to run an application with the
+ # environment set up properly. Of course, this only works for native
+ # builds; for cross builds we must still use the exe_wrapper (if any).
+ self.detected_cc = ''
+ flags = []
+
+ # Disable warnings, compile with statically-linked runtime for minimum
+ # reliance on the system.
+ flags += ['-w', '-cudart', 'static', source_name]
+
+ # Use the -ccbin option, if available, even during sanity checking.
+ # Otherwise, on systems where CUDA does not support the default compiler,
+ # NVCC becomes unusable.
+ flags += self.get_ccbin_args(env.coredata.options)
+
+ # If cross-compiling, we can't run the sanity check, only compile it.
+ if self.is_cross and self.exe_wrapper is None:
+ # Linking cross built apps is painful. You can't really
+ # tell if you should use -nostdlib or not and for example
+ # on OSX the compiler binary is the same but you need
+ # a ton of compiler flags to differentiate between
+ # arm and x86_64. So just compile.
+ flags += self.get_compile_only_args()
+ flags += self.get_output_args(binary_name)
+
+ # Compile sanity check
+ cmdlist = self.exelist + flags
+ mlog.debug('Sanity check compiler command line: ', ' '.join(cmdlist))
+ pc, stdo, stde = Popen_safe(cmdlist, cwd=work_dir)
+ mlog.debug('Sanity check compile stdout: ')
+ mlog.debug(stdo)
+ mlog.debug('-----\nSanity check compile stderr:')
+ mlog.debug(stde)
+ mlog.debug('-----')
+ if pc.returncode != 0:
+ raise EnvironmentException(f'Compiler {self.name_string()} cannot compile programs.')
+
+ # Run sanity check (if possible)
+ if self.is_cross:
+ if self.exe_wrapper is None:
+ return
+ else:
+ cmdlist = self.exe_wrapper.get_command() + [binary_name]
+ else:
+ cmdlist = self.exelist + ['--run', '"' + binary_name + '"']
+ mlog.debug('Sanity check run command line: ', ' '.join(cmdlist))
+ pe, stdo, stde = Popen_safe(cmdlist, cwd=work_dir)
+ mlog.debug('Sanity check run stdout: ')
+ mlog.debug(stdo)
+ mlog.debug('-----\nSanity check run stderr:')
+ mlog.debug(stde)
+ mlog.debug('-----')
+ pe.wait()
+ if pe.returncode != 0:
+ raise EnvironmentException(f'Executables created by {self.language} compiler {self.name_string()} are not runnable.')
+
+ # Interpret the result of the sanity test.
+ # As mentioned above, it is not only a sanity test but also a GPU
+ # architecture detection test.
+ if stde == '':
+ self.detected_cc = stdo
+ else:
+ mlog.debug('cudaGetDeviceCount() returned ' + stde)
+
+ def has_header_symbol(self, hname: str, symbol: str, prefix: str,
+ env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ if extra_args is None:
+ extra_args = []
+ fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol}
+ # Check if it's a C-like symbol
+ t = '''{prefix}
+ #include <{header}>
+ int main(void) {{
+ /* If it's not defined as a macro, try to use as a symbol */
+ #ifndef {symbol}
+ {symbol};
+ #endif
+ return 0;
+ }}'''
+ found, cached = self.compiles(t.format_map(fargs), env, extra_args=extra_args, dependencies=dependencies)
+ if found:
+ return True, cached
+ # Check if it's a class or a template
+ t = '''{prefix}
+ #include <{header}>
+ using {symbol};
+ int main(void) {{
+ return 0;
+ }}'''
+ return self.compiles(t.format_map(fargs), env, extra_args=extra_args, dependencies=dependencies)
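For illustration (hypothetical values): with `prefix=''`, `header='cstdio'`, `symbol='printf'`, the first probe already compiles; for a class template such as `std::vector` in `<vector>` the first probe fails and the `using std::vector;` fallback succeeds, so templates are still reported as found. Rendered, the first probe looks like this (whitespace aside):

```python
# Hypothetical rendering of the first probe for printf in <cstdio>:
probe = '''
#include <cstdio>
int main(void) {
    /* If it's not defined as a macro, try to use as a symbol */
    #ifndef printf
    printf;
    #endif
    return 0;
}'''
```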
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = super().get_options()
+ std_key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ ccbindir_key = OptionKey('ccbindir', machine=self.for_machine, lang=self.language)
+ opts.update({
+ std_key: coredata.UserComboOption('C++ language standard to use with CUDA',
+ ['none', 'c++03', 'c++11', 'c++14', 'c++17'], 'none'),
+ ccbindir_key: coredata.UserStringOption('CUDA non-default toolchain directory to use (-ccbin)',
+ ''),
+ })
+ return opts
+
+ def _to_host_compiler_options(self, options: 'KeyedOptionDictType') -> 'KeyedOptionDictType':
+ """
+ Convert an NVCC Option set to a host compiler's option set.
+ """
+
+ # We must strip the -std option from the host compiler option set, as NVCC has
+ # its own -std flag that may not agree with the host compiler's.
+ host_options = {key: options.get(key, opt) for key, opt in self.host_compiler.get_options().items()}
+ std_key = OptionKey('std', machine=self.for_machine, lang=self.host_compiler.language)
+ overrides = {std_key: 'none'}
+ return OptionOverrideProxy(overrides, host_options)
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = self.get_ccbin_args(options)
+ # On Windows, the version of the C++ standard used by nvcc is dictated by
+ # the combination of CUDA version and MSVC version; the --std= flag is thus
+ # ignored, and attempting to use it results in a warning: https://stackoverflow.com/a/51272091/741027
+ if not is_windows():
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value != 'none':
+ args.append('--std=' + std.value)
+
+ return args + self._to_host_flags(self.host_compiler.get_option_compile_args(self._to_host_compiler_options(options)))
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = self.get_ccbin_args(options)
+ return args + self._to_host_flags(self.host_compiler.get_option_link_args(self._to_host_compiler_options(options)), _Phase.LINKER)
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str,
+ darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ return self._to_host_flags(self.host_compiler.get_soname_args(
+ env, prefix, shlib_name, suffix, soversion, darwin_versions,
+ is_shared_module), _Phase.LINKER)
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return ['-c']
+
+ def get_no_optimization_args(self) -> T.List[str]:
+ return ['-O0']
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ # alternatively, consider simply redirecting this to the host compiler, which would
+ # give us more control over options like "optimize for space" (which nvcc doesn't support):
+ # return self._to_host_flags(self.host_compiler.get_optimization_args(optimization_level))
+ return cuda_optimization_args[optimization_level]
+
+ def sanitizer_compile_args(self, value: str) -> T.List[str]:
+ return self._to_host_flags(self.host_compiler.sanitizer_compile_args(value))
+
+ def sanitizer_link_args(self, value: str) -> T.List[str]:
+ return self._to_host_flags(self.host_compiler.sanitizer_link_args(value))
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return cuda_debug_args[is_debug]
+
+ def get_werror_args(self) -> T.List[str]:
+ return ['-Werror=cross-execution-space-call,deprecated-declarations,reorder']
+
+ def get_warn_args(self, level: str) -> T.List[str]:
+ return self.warn_args[level]
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ # nvcc doesn't support msvc's "Edit and Continue" PDB format; "downgrade" to
+ # a regular PDB to avoid cl's warning to that effect (D9025 : overriding '/ZI' with '/Zi')
+ host_args = ['/Zi' if arg == '/ZI' else arg for arg in self.host_compiler.get_buildtype_args(buildtype)]
+ return cuda_buildtype_args[buildtype] + self._to_host_flags(host_args)
+
+ def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+ if path == '':
+ path = '.'
+ return ['-isystem=' + path] if is_system else ['-I' + path]
+
+ def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]:
+ return self._to_host_flags(self.host_compiler.get_compile_debugfile_args(rel_obj, pch))
+
+ def get_link_debugfile_args(self, targetfile: str) -> T.List[str]:
+ return self._to_host_flags(self.host_compiler.get_link_debugfile_args(targetfile), _Phase.LINKER)
+
+ def get_depfile_suffix(self) -> str:
+ return 'd'
+
+ def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
+ return self._to_host_flags(self.host_compiler.get_buildtype_linker_args(buildtype), _Phase.LINKER)
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ (rpath_args, rpath_dirs_to_remove) = self.host_compiler.build_rpath_args(
+ env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ return (self._to_host_flags(rpath_args, _Phase.LINKER), rpath_dirs_to_remove)
+
+ def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]:
+ return args
+
+ def get_pic_args(self) -> T.List[str]:
+ return self._to_host_flags(self.host_compiler.get_pic_args())
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+ build_dir: str) -> T.List[str]:
+ return []
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return ['-o', target]
+
+ def get_std_exe_link_args(self) -> T.List[str]:
+ return self._to_host_flags(self.host_compiler.get_std_exe_link_args(), _Phase.LINKER)
+
+ def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
+ libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]:
+ return ['-l' + libname] # FIXME
+
+ def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ return self._to_host_flags(self.host_compiler.get_crt_compile_args(crt_val, buildtype))
+
+ def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ # nvcc defaults to the static, release version of the MSVC runtime and
+ # provides no native option to override it; override it with /NODEFAULTLIB.
+ host_link_arg_overrides = []
+ host_crt_compile_args = self.host_compiler.get_crt_compile_args(crt_val, buildtype)
+ if any(arg in ['/MDd', '/MD', '/MTd'] for arg in host_crt_compile_args):
+ host_link_arg_overrides += ['/NODEFAULTLIB:LIBCMT.lib']
+ return self._to_host_flags(host_link_arg_overrides + self.host_compiler.get_crt_link_args(crt_val, buildtype), _Phase.LINKER)
+
+ def get_target_link_args(self, target: 'BuildTarget') -> T.List[str]:
+ return self._to_host_flags(super().get_target_link_args(target), _Phase.LINKER)
+
+ def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]:
+ return self._to_host_flags(super().get_dependency_compile_args(dep))
+
+ def get_dependency_link_args(self, dep: 'Dependency') -> T.List[str]:
+ return self._to_host_flags(super().get_dependency_link_args(dep), _Phase.LINKER)
+
+ def get_ccbin_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ key = OptionKey('ccbindir', machine=self.for_machine, lang=self.language)
+ ccbindir = options[key].value
+ if isinstance(ccbindir, str) and ccbindir != '':
+ return [self._shield_nvcc_list_arg('-ccbin='+ccbindir, False)]
+ else:
+ return []
diff --git a/meson/mesonbuild/compilers/cython.py b/meson/mesonbuild/compilers/cython.py
new file mode 100644
index 000000000..513f07995
--- /dev/null
+++ b/meson/mesonbuild/compilers/cython.py
@@ -0,0 +1,79 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2021 Intel Corporation
+
+"""Abstraction for Cython language compilers."""
+
+import typing as T
+
+from .. import coredata
+from ..mesonlib import EnvironmentException, OptionKey
+from .compilers import Compiler
+
+if T.TYPE_CHECKING:
+ from ..coredata import KeyedOptionDictType
+ from ..environment import Environment
+
+
+class CythonCompiler(Compiler):
+
+ """Cython Compiler."""
+
+ language = 'cython'
+ id = 'cython'
+
+ def needs_static_linker(self) -> bool:
+ # We transpile into C, so we don't need any linker
+ return False
+
+ def get_always_args(self) -> T.List[str]:
+ return ['--fast-fail']
+
+ def get_werror_args(self) -> T.List[str]:
+ return ['-Werror']
+
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ return ['-o', outputname]
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ # Cython doesn't have optimization levels itself; the underlying
+ # C compiler might, though.
+ return []
+
+ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ code = 'print("hello world")'
+ with self.cached_compile(code, environment.coredata) as p:
+ if p.returncode != 0:
+ raise EnvironmentException(f'Cython compiler {self.id!r} cannot compile programs')
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ # Cython doesn't implement this, but Meson requires an implementation
+ return []
+
+ def get_pic_args(self) -> T.List[str]:
+ # We can lie here, it's fine
+ return []
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+ build_dir: str) -> T.List[str]:
+ # Cython has no path-valued arguments that need rewriting; return an
+ # unchanged copy.
+ return parameter_list.copy()
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = super().get_options()
+ opts.update({
+ OptionKey('version', machine=self.for_machine, lang=self.language): coredata.UserComboOption(
+ 'Python version to target',
+ ['2', '3'],
+ '3',
+ )
+ })
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args: T.List[str] = []
+ key = options[OptionKey('version', machine=self.for_machine, lang=self.language)]
+ args.append(f'-{key.value}')
+ return args
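A hypothetical sketch of the resulting arguments (assuming `comp` is a configured `CythonCompiler` and `opts` its resolved option dict): the `version` option maps directly onto cython's `-2`/`-3` switches.

```python
comp.get_option_compile_args(opts)  # -> ['-3'] with the default; ['-2'] when version is set to '2'
```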
diff --git a/meson/mesonbuild/compilers/d.py b/meson/mesonbuild/compilers/d.py
new file mode 100644
index 000000000..b5ec905c2
--- /dev/null
+++ b/meson/mesonbuild/compilers/d.py
@@ -0,0 +1,906 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+import re
+import subprocess
+import typing as T
+
+from ..mesonlib import (
+ EnvironmentException, MachineChoice, version_compare, OptionKey, is_windows
+)
+
+from ..arglist import CompilerArgs
+from ..linkers import RSPFileSyntax
+from .compilers import (
+ d_dmd_buildtype_args,
+ d_gdc_buildtype_args,
+ d_ldc_buildtype_args,
+ clike_debug_args,
+ Compiler,
+)
+from .mixins.gnu import GnuCompiler
+
+if T.TYPE_CHECKING:
+ from .compilers import Compiler as CompilerMixinBase
+ from ..programs import ExternalProgram
+ from ..envconfig import MachineInfo
+ from ..environment import Environment
+ from ..linkers import DynamicLinker
+else:
+ CompilerMixinBase = object
+
+d_feature_args = {'gcc': {'unittest': '-funittest',
+ 'debug': '-fdebug',
+ 'version': '-fversion',
+ 'import_dir': '-J'
+ },
+ 'llvm': {'unittest': '-unittest',
+ 'debug': '-d-debug',
+ 'version': '-d-version',
+ 'import_dir': '-J'
+ },
+ 'dmd': {'unittest': '-unittest',
+ 'debug': '-debug',
+ 'version': '-version',
+ 'import_dir': '-J'
+ }
+ } # type: T.Dict[str, T.Dict[str, str]]
+
+ldc_optimization_args = {'0': [],
+ 'g': [],
+ '1': ['-O1'],
+ '2': ['-O2'],
+ '3': ['-O3'],
+ 's': ['-Os'],
+ } # type: T.Dict[str, T.List[str]]
+
+dmd_optimization_args = {'0': [],
+ 'g': [],
+ '1': ['-O'],
+ '2': ['-O'],
+ '3': ['-O'],
+ 's': ['-O'],
+ } # type: T.Dict[str, T.List[str]]
+
+
+class DmdLikeCompilerMixin(CompilerMixinBase):
+
+ """Mixin class for DMD and LDC.
+
+ LDC has a number of DMD-like arguments, and this class allows for code
+ sharing between them where it makes sense.
+ """
+
+ def __init__(self, dmd_frontend_version: T.Optional[str]):
+ if dmd_frontend_version is None:
+ self._dmd_has_depfile = False
+ else:
+ # -makedeps switch introduced in 2.095 frontend
+ self._dmd_has_depfile = version_compare(dmd_frontend_version, ">=2.095.0")
+
+ if T.TYPE_CHECKING:
+ mscrt_args = {} # type: T.Dict[str, T.List[str]]
+
+ def _get_target_arch_args(self) -> T.List[str]: ...
+
+ LINKER_PREFIX = '-L='
+
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ return ['-of=' + outputname]
+
+ def get_linker_output_args(self, outputname: str) -> T.List[str]:
+ return ['-of=' + outputname]
+
+ def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+ return ['-I=' + path]
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+ build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:3] == '-I=':
+ parameter_list[idx] = i[:3] + os.path.normpath(os.path.join(build_dir, i[3:]))
+ if i[:4] == '-L-L':
+ parameter_list[idx] = i[:4] + os.path.normpath(os.path.join(build_dir, i[4:]))
+ if i[:5] == '-L=-L':
+ parameter_list[idx] = i[:5] + os.path.normpath(os.path.join(build_dir, i[5:]))
+ if i[:6] == '-Wl,-L':
+ parameter_list[idx] = i[:6] + os.path.normpath(os.path.join(build_dir, i[6:]))
+
+ return parameter_list
+
+ def get_warn_args(self, level: str) -> T.List[str]:
+ return ['-wi']
+
+ def get_werror_args(self) -> T.List[str]:
+ return ['-w']
+
+ def get_coverage_args(self) -> T.List[str]:
+ return ['-cov']
+
+ def get_coverage_link_args(self) -> T.List[str]:
+ return []
+
+ def get_preprocess_only_args(self) -> T.List[str]:
+ return ['-E']
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return ['-c']
+
+ def get_depfile_suffix(self) -> str:
+ return 'deps'
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ if self._dmd_has_depfile:
+ return [f'-makedeps={outfile}']
+ return []
+
+ def get_pic_args(self) -> T.List[str]:
+ if self.info.is_windows():
+ return []
+ return ['-fPIC']
+
+ def get_feature_args(self, kwargs: T.Dict[str, T.Any], build_to_src: str) -> T.List[str]:
+ # TODO: using a TypeDict here would improve this
+ res = []
+ # get_feature_args can be called multiple times for the same target when there is generated source
+ # so we have to copy the kwargs (target.d_features) dict before popping from it
+ kwargs = kwargs.copy()
+ if 'unittest' in kwargs:
+ unittest = kwargs.pop('unittest')
+ unittest_arg = d_feature_args[self.id]['unittest']
+ if not unittest_arg:
+ raise EnvironmentException('D compiler %s does not support the "unittest" feature.' % self.name_string())
+ if unittest:
+ res.append(unittest_arg)
+
+ if 'debug' in kwargs:
+ debug_level = -1
+ debugs = kwargs.pop('debug')
+ if not isinstance(debugs, list):
+ debugs = [debugs]
+
+ debug_arg = d_feature_args[self.id]['debug']
+ if not debug_arg:
+ raise EnvironmentException('D compiler %s does not support conditional debug identifiers.' % self.name_string())
+
+ # Parse all debug identifiers and the largest debug level identifier
+ for d in debugs:
+ if isinstance(d, int):
+ if d > debug_level:
+ debug_level = d
+ elif isinstance(d, str) and d.isdigit():
+ if int(d) > debug_level:
+ debug_level = int(d)
+ else:
+ res.append(f'{debug_arg}={d}')
+
+ if debug_level >= 0:
+ res.append(f'{debug_arg}={debug_level}')
+
+ if 'versions' in kwargs:
+ version_level = -1
+ versions = kwargs.pop('versions')
+ if not isinstance(versions, list):
+ versions = [versions]
+
+ version_arg = d_feature_args[self.id]['version']
+ if not version_arg:
+ raise EnvironmentException('D compiler %s does not support conditional version identifiers.' % self.name_string())
+
+ # Parse all version identifiers and the largest version level identifier
+ for v in versions:
+ if isinstance(v, int):
+ if v > version_level:
+ version_level = v
+ elif isinstance(v, str) and v.isdigit():
+ if int(v) > version_level:
+ version_level = int(v)
+ else:
+ res.append(f'{version_arg}={v}')
+
+ if version_level >= 0:
+ res.append(f'{version_arg}={version_level}')
+
+ if 'import_dirs' in kwargs:
+ import_dirs = kwargs.pop('import_dirs')
+ if not isinstance(import_dirs, list):
+ import_dirs = [import_dirs]
+
+ import_dir_arg = d_feature_args[self.id]['import_dir']
+ if not import_dir_arg:
+ raise EnvironmentException('D compiler %s does not support the "string import directories" feature.' % self.name_string())
+ for idir_obj in import_dirs:
+ basedir = idir_obj.get_curdir()
+ for idir in idir_obj.get_incdirs():
+ bldtreedir = os.path.join(basedir, idir)
+ # Avoid a superfluous '/.' at the end of paths when idir is '' or '.'
+ if idir not in ('', '.'):
+ expdir = bldtreedir
+ else:
+ expdir = basedir
+ srctreedir = os.path.join(build_to_src, expdir)
+ res.append(f'{import_dir_arg}{srctreedir}')
+ res.append(f'{import_dir_arg}{bldtreedir}')
+
+ if kwargs:
+ raise EnvironmentException('Unknown D compiler feature(s) selected: %s' % ', '.join(kwargs.keys()))
+
+ return res
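An illustrative call (hypothetical `comp` with `id == 'gcc'`): numeric `debug` entries keep only the maximum level, while named identifiers each get their own flag, names first.

```python
comp.get_feature_args({'debug': [1, '3', 'logging']}, build_to_src='..')
# -> ['-fdebug=logging', '-fdebug=3']
```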
+
+ def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
+ if buildtype != 'plain':
+ return self._get_target_arch_args()
+ return []
+
+ def gen_import_library_args(self, implibname: str) -> T.List[str]:
+ return self.linker.import_library_args(implibname)
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ if self.info.is_windows():
+ return ([], set())
+
+ # GNU ld, solaris ld, and lld acting like GNU ld
+ if self.linker.id.startswith('ld'):
+ # The way dmd and ldc pass rpath to gcc differs from the way we would
+ # pass it directly: the -rpath argument and its value each need to be
+ # split into separate arguments, both prefaced with -L=.
+ args = []
+ (rpath_args, rpath_dirs_to_remove) = super().build_rpath_args(
+ env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ for r in rpath_args:
+ if ',' in r:
+ a, b = r.split(',', maxsplit=1)
+ args.append(a)
+ args.append(self.LINKER_PREFIX + b)
+ else:
+ args.append(r)
+ return (args, rpath_dirs_to_remove)
+
+ return super().build_rpath_args(
+ env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+
+ def _translate_args_to_nongnu(self, args: T.List[str]) -> T.List[str]:
+ # Translate common arguments to flags the LDC/DMD compilers
+ # can understand.
+ # The flags might have been added by pkg-config files,
+ # and are therefore out of the user's control.
+ dcargs = []
+ # whether we hit a linker argument that expects another arg
+ # see the comment in the "-L" section
+ link_expect_arg = False
+ link_flags_with_arg = [
+ '-rpath', '-soname', '-compatibility_version', '-current_version',
+ ]
+ for arg in args:
+ # Translate OS specific arguments first.
+ osargs = [] # type: T.List[str]
+ if self.info.is_windows():
+ osargs = self.translate_arg_to_windows(arg)
+ elif self.info.is_darwin():
+ osargs = self._translate_arg_to_osx(arg)
+ if osargs:
+ dcargs.extend(osargs)
+ continue
+
+ # Translate common D arguments here.
+ if arg == '-pthread':
+ continue
+ if arg.startswith('-fstack-protector'):
+ continue
+ if arg.startswith('-D'):
+ continue
+ if arg.startswith('-Wl,'):
+ # Translate linker arguments here.
+ linkargs = arg[arg.index(',') + 1:].split(',')
+ for la in linkargs:
+ dcargs.append('-L=' + la.strip())
+ continue
+ elif arg.startswith(('-link-defaultlib', '-linker', '-link-internally', '-linkonce-templates', '-lib')):
+ # these are special arguments to the LDC linker call,
+ # arguments like "-link-defaultlib-shared" do *not*
+ # denote a library to be linked, but change the default
+ # Phobos/DRuntime linking behavior, while "-linker" sets the
+ # default linker.
+ dcargs.append(arg)
+ continue
+ elif arg.startswith('-l'):
+ # translate library link flag
+ dcargs.append('-L=' + arg)
+ continue
+ elif arg.startswith('-isystem'):
+ # translate -isystem system include path
+ # this flag might sometimes be added by C library Cflags via
+ # pkg-config.
+ # NOTE: -isystem and -I are not 100% equivalent, so this is just
+ # a workaround for the most common cases.
+ if arg.startswith('-isystem='):
+ dcargs.append('-I=' + arg[9:])
+ else:
+ dcargs.append('-I' + arg[8:])
+ continue
+ elif arg.startswith('-idirafter'):
+ # same as -isystem, but appends the path instead
+ if arg.startswith('-idirafter='):
+ dcargs.append('-I=' + arg[11:])
+ else:
+ dcargs.append('-I' + arg[10:])
+ continue
+ elif arg.startswith('-L'):
+ # The D linker driver expects library search paths in the form -L=-L/path (the '=' is optional).
+ #
+ # This function receives a mix of arguments already prepended
+ # with -L for the D linker driver and other linker arguments.
+ # The arguments starting with -L can be:
+ # - library search path (with or without a second -L)
+ # - it can come from pkg-config (a single -L)
+ # - or from the user passing linker flags (-L-L would be expected)
+ # - arguments like "-L=-rpath" that expect a second argument (also prepended with -L)
+ # - arguments like "-L=@rpath/xxx" without a second argument (on Apple platform)
+ # - arguments like "-L=/SUBSYSTEM:CONSOLE (for Windows linker)
+ #
+ # The logic that follows tries to detect all these cases (some may be missing)
+ # in order to prepend a -L only to the library search paths that arrive with a single -L.
+
+ if arg.startswith('-L='):
+ suffix = arg[3:]
+ else:
+ suffix = arg[2:]
+
+ if link_expect_arg:
+ # flags like -rpath and -soname expect a path or a filename respectively;
+ # we must not alter them (e.g. by prefixing with -L for a lib search path)
+ dcargs.append(arg)
+ link_expect_arg = False
+ continue
+
+ if suffix in link_flags_with_arg:
+ link_expect_arg = True
+
+ if suffix.startswith('-') or suffix.startswith('@'):
+ # this is not a search path
+ dcargs.append(arg)
+ continue
+
+ # linker flag such as -L=/DEBUG must pass through
+ if self.linker.id == 'link' and self.info.is_windows() and suffix.startswith('/'):
+ dcargs.append(arg)
+ continue
+
+ # Make sure static library files are passed properly to the linker.
+ if arg.endswith('.a') or arg.endswith('.lib'):
+ if len(suffix) > 0 and not suffix.startswith('-'):
+ dcargs.append('-L=' + suffix)
+ continue
+
+ dcargs.append('-L=' + arg)
+ continue
+ elif not arg.startswith('-') and arg.endswith(('.a', '.lib')):
+ # ensure static libraries are passed through to the linker
+ dcargs.append('-L=' + arg)
+ continue
+ else:
+ dcargs.append(arg)
+
+ return dcargs
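An illustrative hand-traced run on a non-Windows host (hypothetical `comp`), showing the main -L cases described above:

```python
comp._translate_args_to_nongnu(['-L/usr/lib', '-lfoo', '-Wl,-rpath,/opt/lib', '-pthread'])
# -> ['-L=-L/usr/lib',  # bare search path gains the second -L
#     '-L=-lfoo',       # library link flag forwarded to the linker
#     '-L=-rpath',      # the -Wl,... pair is split into two -L= arguments
#     '-L=/opt/lib']    # ...and -pthread is dropped entirely
```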
+
+ @classmethod
+ def translate_arg_to_windows(cls, arg: str) -> T.List[str]:
+ args = []
+ if arg.startswith('-Wl,'):
+ # Translate linker arguments here.
+ linkargs = arg[arg.index(',') + 1:].split(',')
+ for la in linkargs:
+ if la.startswith('--out-implib='):
+ # Import library name
+ args.append('-L=/IMPLIB:' + la[13:].strip())
+ elif arg.startswith('-mscrtlib='):
+ args.append(arg)
+ mscrtlib = arg[10:].lower()
+ if cls is LLVMDCompiler:
+ # Default crt libraries for LDC2 must be excluded for other
+ # selected crt options.
+ if mscrtlib != 'libcmt':
+ args.append('-L=/NODEFAULTLIB:libcmt')
+ args.append('-L=/NODEFAULTLIB:libvcruntime')
+
+ # Fixes missing definitions for printf-functions in VS2017
+ if mscrtlib.startswith('msvcrt'):
+ args.append('-L=/DEFAULTLIB:legacy_stdio_definitions.lib')
+
+ return args
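A hand-traced illustration: for LDC (`cls is LLVMDCompiler`), selecting the dynamic CRT also suppresses the static defaults and restores the legacy stdio definitions.

```python
LLVMDCompiler.translate_arg_to_windows('-mscrtlib=msvcrt')
# -> ['-mscrtlib=msvcrt',
#     '-L=/NODEFAULTLIB:libcmt',
#     '-L=/NODEFAULTLIB:libvcruntime',
#     '-L=/DEFAULTLIB:legacy_stdio_definitions.lib']
```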
+
+ @classmethod
+ def _translate_arg_to_osx(cls, arg: str) -> T.List[str]:
+ args = []
+ if arg.startswith('-install_name'):
+ args.append('-L=' + arg)
+ return args
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ ddebug_args = []
+ if is_debug:
+ ddebug_args = [d_feature_args[self.id]['debug']]
+
+ return clike_debug_args[is_debug] + ddebug_args
+
+ def _get_crt_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ if not self.info.is_windows():
+ return []
+
+ if crt_val in self.mscrt_args:
+ return self.mscrt_args[crt_val]
+ assert(crt_val in ['from_buildtype', 'static_from_buildtype'])
+
+ dbg = 'mdd'
+ rel = 'md'
+ if crt_val == 'static_from_buildtype':
+ dbg = 'mtd'
+ rel = 'mt'
+
+ # Match what build type flags used to do.
+ if buildtype == 'plain':
+ return []
+ elif buildtype == 'debug':
+ return self.mscrt_args[dbg]
+ elif buildtype == 'debugoptimized':
+ return self.mscrt_args[rel]
+ elif buildtype == 'release':
+ return self.mscrt_args[rel]
+ elif buildtype == 'minsize':
+ return self.mscrt_args[rel]
+ else:
+ assert(buildtype == 'custom')
+ raise EnvironmentException('Requested C runtime based on buildtype, but buildtype is "custom".')
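Illustrative resolutions on a Windows host (hypothetical `comp`): the `*_from_buildtype` values pick the CRT variant matching the build type.

```python
comp._get_crt_args('from_buildtype', 'debug')            # -> ['-mscrtlib=msvcrtd']
comp._get_crt_args('static_from_buildtype', 'release')   # -> ['-mscrtlib=libcmt']
```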
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str,
+ darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ sargs = super().get_soname_args(env, prefix, shlib_name, suffix,
+ soversion, darwin_versions, is_shared_module)
+
+ # LDC and DMD actually do use a linker, but they proxy all of that with
+ # their own arguments
+ if self.linker.id.startswith('ld.'):
+ soargs = []
+ for arg in sargs:
+ a, b = arg.split(',', maxsplit=1)
+ soargs.append(a)
+ soargs.append(self.LINKER_PREFIX + b)
+ return soargs
+ elif self.linker.id.startswith('ld64'):
+ soargs = []
+ for arg in sargs:
+ if not arg.startswith(self.LINKER_PREFIX):
+ soargs.append(self.LINKER_PREFIX + arg)
+ else:
+ soargs.append(arg)
+ return soargs
+ else:
+ return sargs
+
+ def get_allow_undefined_link_args(self) -> T.List[str]:
+ args = self.linker.get_allow_undefined_args()
+ if self.info.is_darwin():
+ # On macOS we're passing these options to the C compiler, but
+ # they're linker options and need -Wl, so clang/gcc knows what to
+ # do with them. I'm assuming, but don't know for certain, that
+ # ldc/dmd do some kind of mapping internally for arguments they
+ # understand, but pass arguments they don't understand directly.
+ args = [a.replace('-L=', '-Xcc=-Wl,') for a in args]
+ return args
+
+
+class DCompilerArgs(CompilerArgs):
+ prepend_prefixes = ('-I', '-L')
+ dedup2_prefixes = ('-I', )
+
+
+class DCompiler(Compiler):
+ mscrt_args = {
+ 'none': ['-mscrtlib='],
+ 'md': ['-mscrtlib=msvcrt'],
+ 'mdd': ['-mscrtlib=msvcrtd'],
+ 'mt': ['-mscrtlib=libcmt'],
+ 'mtd': ['-mscrtlib=libcmtd'],
+ }
+
+ language = 'd'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ info: 'MachineInfo', arch: str, *,
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None,
+ is_cross: bool = False):
+ super().__init__(exelist, version, for_machine, info, linker=linker,
+ full_version=full_version, is_cross=is_cross)
+ self.arch = arch
+ self.exe_wrapper = exe_wrapper
+
+ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ source_name = os.path.join(work_dir, 'sanity.d')
+ output_name = os.path.join(work_dir, 'dtest')
+ with open(source_name, 'w', encoding='utf-8') as ofile:
+ ofile.write('''void main() { }''')
+ pc = subprocess.Popen(self.exelist + self.get_output_args(output_name) + self._get_target_arch_args() + [source_name], cwd=work_dir)
+ pc.wait()
+ if pc.returncode != 0:
+ raise EnvironmentException('D compiler %s cannot compile programs.' % self.name_string())
+ if self.is_cross:
+ if self.exe_wrapper is None:
+ # Can't check if the binaries run so we have to assume they do
+ return
+ cmdlist = self.exe_wrapper.get_command() + [output_name]
+ else:
+ cmdlist = [output_name]
+ if subprocess.call(cmdlist) != 0:
+ raise EnvironmentException('Executables created by D compiler %s are not runnable.' % self.name_string())
+
+ def needs_static_linker(self) -> bool:
+ return True
+
+ def get_depfile_suffix(self) -> str:
+ return 'deps'
+
+ def get_pic_args(self) -> T.List[str]:
+ if self.info.is_windows():
+ return []
+ return ['-fPIC']
+
+ def get_feature_args(self, kwargs: T.Dict[str, T.Any], build_to_src: str) -> T.List[str]:
+ # TODO: using a TypeDict here would improve this
+ res = []
+ # get_feature_args can be called multiple times for the same target when there is generated source
+ # so we have to copy the kwargs (target.d_features) dict before popping from it
+ kwargs = kwargs.copy()
+ if 'unittest' in kwargs:
+ unittest = kwargs.pop('unittest')
+ unittest_arg = d_feature_args[self.id]['unittest']
+ if not unittest_arg:
+ raise EnvironmentException('D compiler %s does not support the "unittest" feature.' % self.name_string())
+ if unittest:
+ res.append(unittest_arg)
+
+ if 'debug' in kwargs:
+ debug_level = -1
+ debugs = kwargs.pop('debug')
+ if not isinstance(debugs, list):
+ debugs = [debugs]
+
+ debug_arg = d_feature_args[self.id]['debug']
+ if not debug_arg:
+ raise EnvironmentException('D compiler %s does not support conditional debug identifiers.' % self.name_string())
+
+ # Parse all debug identifiers and the largest debug level identifier
+ for d in debugs:
+ if isinstance(d, int):
+ if d > debug_level:
+ debug_level = d
+ elif isinstance(d, str) and d.isdigit():
+ if int(d) > debug_level:
+ debug_level = int(d)
+ else:
+ res.append(f'{debug_arg}={d}')
+
+ if debug_level >= 0:
+ res.append(f'{debug_arg}={debug_level}')
+
+ if 'versions' in kwargs:
+ version_level = -1
+ versions = kwargs.pop('versions')
+ if not isinstance(versions, list):
+ versions = [versions]
+
+ version_arg = d_feature_args[self.id]['version']
+ if not version_arg:
+ raise EnvironmentException('D compiler %s does not support conditional version identifiers.' % self.name_string())
+
+ # Parse all version identifiers and the largest version level identifier
+ for v in versions:
+ if isinstance(v, int):
+ if v > version_level:
+ version_level = v
+ elif isinstance(v, str) and v.isdigit():
+ if int(v) > version_level:
+ version_level = int(v)
+ else:
+ res.append(f'{version_arg}={v}')
+
+ if version_level >= 0:
+ res.append(f'{version_arg}={version_level}')
+
+ if 'import_dirs' in kwargs:
+ import_dirs = kwargs.pop('import_dirs')
+ if not isinstance(import_dirs, list):
+ import_dirs = [import_dirs]
+
+ import_dir_arg = d_feature_args[self.id]['import_dir']
+ if not import_dir_arg:
+ raise EnvironmentException('D compiler %s does not support the "string import directories" feature.' % self.name_string())
+ for idir_obj in import_dirs:
+ basedir = idir_obj.get_curdir()
+ for idir in idir_obj.get_incdirs():
+ bldtreedir = os.path.join(basedir, idir)
+ # Avoid a superfluous '/.' at the end of paths when idir is '' or '.'
+ if idir not in ('', '.'):
+ expdir = bldtreedir
+ else:
+ expdir = basedir
+ srctreedir = os.path.join(build_to_src, expdir)
+ res.append(f'{import_dir_arg}{srctreedir}')
+ res.append(f'{import_dir_arg}{bldtreedir}')
+
+ if kwargs:
+ raise EnvironmentException('Unknown D compiler feature(s) selected: %s' % ', '.join(kwargs.keys()))
+
+ return res
+
+ def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
+ if buildtype != 'plain':
+ return self._get_target_arch_args()
+ return []
+
+ def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> DCompilerArgs:
+ return DCompilerArgs(self, args)
+
+ def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+ return self.compiles('int i;\n', env, extra_args=args)
+
+ def _get_target_arch_args(self) -> T.List[str]:
+ # LDC2 on Windows targets the current OS architecture by default, but
+ # it should follow the target specified by the MSVC toolchain.
+ if self.info.is_windows():
+ if self.arch == 'x86_64':
+ return ['-m64']
+ return ['-m32']
+ return []
+
+ def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ return []
+
+ def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ return []
+
+
+class GnuDCompiler(GnuCompiler, DCompiler):
+
+ # we mostly want DCompiler, but that gives us the Compiler.LINKER_PREFIX instead
+ LINKER_PREFIX = GnuCompiler.LINKER_PREFIX
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ info: 'MachineInfo', arch: str, *,
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None,
+ is_cross: bool = False):
+ DCompiler.__init__(self, exelist, version, for_machine, info, arch,
+ exe_wrapper=exe_wrapper, linker=linker,
+ full_version=full_version, is_cross=is_cross)
+ GnuCompiler.__init__(self, {})
+ self.id = 'gcc'
+ default_warn_args = ['-Wall', '-Wdeprecated']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+ self.base_options = {
+ OptionKey(o) for o in [
+ 'b_colorout', 'b_sanitize', 'b_staticpic', 'b_vscrt',
+ 'b_coverage', 'b_pgo', 'b_ndebug']}
+
+ self._has_color_support = version_compare(self.version, '>=4.9')
+ # dependency generation was implemented earlier but broken; working
+ # support arrived in GCC 7.1+ (and some backported versions)
+ self._has_deps_support = version_compare(self.version, '>=7.1')
+
+ def get_colorout_args(self, colortype: str) -> T.List[str]:
+ if self._has_color_support:
+ return super().get_colorout_args(colortype)
+ return []
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ if self._has_deps_support:
+ return super().get_dependency_gen_args(outtarget, outfile)
+ return []
+
+ def get_warn_args(self, level: str) -> T.List[str]:
+ return self.warn_args[level]
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return d_gdc_buildtype_args[buildtype]
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+ build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
+
+ def get_allow_undefined_link_args(self) -> T.List[str]:
+ return self.linker.get_allow_undefined_args()
+
+ def get_linker_always_args(self) -> T.List[str]:
+ args = super().get_linker_always_args()
+ if self.info.is_windows():
+ return args
+ return args + ['-shared-libphobos']
+
+ def get_disable_assert_args(self) -> T.List[str]:
+ return ['-frelease']
+
+# LDC uses the DMD frontend code to parse and analyse the code.
+# It then uses LLVM for the binary code generation and optimizations.
+# This function retrieves the dmd frontend version, which determines
+# the common features between LDC and DMD.
+ # We need the complete version text because the match is not on the
+ # first line of version_output.
+def find_ldc_dmd_frontend_version(version_output: T.Optional[str]) -> T.Optional[str]:
+ if version_output is None:
+ return None
+ version_regex = re.search(r'DMD v(\d+\.\d+\.\d+)', version_output)
+ if version_regex:
+ return version_regex.group(1)
+ return None
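For illustration, with a made-up but representative `--version` banner (the DMD line is not the first line, which is why the whole text is searched):

```python
banner = 'LDC - the LLVM D compiler (1.28.0):\n  based on DMD v2.098.1 and LLVM 13.0.0'
assert find_ldc_dmd_frontend_version(banner) == '2.098.1'
assert find_ldc_dmd_frontend_version(None) is None
```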
+
+class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler):
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ info: 'MachineInfo', arch: str, *,
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None,
+ is_cross: bool = False, version_output: T.Optional[str] = None):
+ DCompiler.__init__(self, exelist, version, for_machine, info, arch,
+ exe_wrapper=exe_wrapper, linker=linker,
+ full_version=full_version, is_cross=is_cross)
+ DmdLikeCompilerMixin.__init__(self, dmd_frontend_version=find_ldc_dmd_frontend_version(version_output))
+ self.id = 'llvm'
+ self.base_options = {OptionKey(o) for o in ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug']}
+
+ def get_colorout_args(self, colortype: str) -> T.List[str]:
+ if colortype == 'always':
+ return ['-enable-color']
+ return []
+
+ def get_warn_args(self, level: str) -> T.List[str]:
+ if level in {'2', '3'}:
+ return ['-wi', '-dw']
+ elif level == '1':
+ return ['-wi']
+ return []
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ if buildtype != 'plain':
+ return self._get_target_arch_args() + d_ldc_buildtype_args[buildtype]
+ return d_ldc_buildtype_args[buildtype]
+
+ def get_pic_args(self) -> T.List[str]:
+ return ['-relocation-model=pic']
+
+ def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ return self._get_crt_args(crt_val, buildtype)
+
+ def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:
+ return self._translate_args_to_nongnu(args)
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return ldc_optimization_args[optimization_level]
+
+ @classmethod
+ def use_linker_args(cls, linker: str) -> T.List[str]:
+ return [f'-linker={linker}']
+
+ def get_linker_always_args(self) -> T.List[str]:
+ args = super().get_linker_always_args()
+ if self.info.is_windows():
+ return args
+ return args + ['-link-defaultlib-shared']
+
+ def get_disable_assert_args(self) -> T.List[str]:
+ return ['--release']
+
+ def rsp_file_syntax(self) -> RSPFileSyntax:
+ # We use `mesonlib.is_windows` here because we want to know what the
+ # build machine is, not the host machine. This really means we should
+ # have the Environment, not the MachineInfo, in the compiler.
+ return RSPFileSyntax.MSVC if is_windows() else RSPFileSyntax.GCC
+
+
+class DmdDCompiler(DmdLikeCompilerMixin, DCompiler):
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ info: 'MachineInfo', arch: str, *,
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None,
+ is_cross: bool = False):
+ DCompiler.__init__(self, exelist, version, for_machine, info, arch,
+ exe_wrapper=exe_wrapper, linker=linker,
+ full_version=full_version, is_cross=is_cross)
+ DmdLikeCompilerMixin.__init__(self, version)
+ self.id = 'dmd'
+ self.base_options = {OptionKey(o) for o in ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug']}
+
+ def get_colorout_args(self, colortype: str) -> T.List[str]:
+ if colortype == 'always':
+ return ['-color=on']
+ return []
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ if buildtype != 'plain':
+ return self._get_target_arch_args() + d_dmd_buildtype_args[buildtype]
+ return d_dmd_buildtype_args[buildtype]
+
+ def get_std_exe_link_args(self) -> T.List[str]:
+ if self.info.is_windows():
+ # DMD links against the D runtime only when the main symbol is found,
+ # so these need to be inserted when linking static D libraries.
+ if self.arch == 'x86_64':
+ return ['phobos64.lib']
+ elif self.arch == 'x86_mscoff':
+ return ['phobos32mscoff.lib']
+ return ['phobos.lib']
+ return []
+
+ def get_std_shared_lib_link_args(self) -> T.List[str]:
+ libname = 'libphobos2.so'
+ if self.info.is_windows():
+ if self.arch == 'x86_64':
+ libname = 'phobos64.lib'
+ elif self.arch == 'x86_mscoff':
+ libname = 'phobos32mscoff.lib'
+ else:
+ libname = 'phobos.lib'
+ return ['-shared', '-defaultlib=' + libname]
+
+ def _get_target_arch_args(self) -> T.List[str]:
+ # DMD32 and DMD64 on 64-bit Windows default to 32-bit (OMF).
+ # Force the target to 64-bit in order to stay consistent
+ # across the different platforms.
+ if self.info.is_windows():
+ if self.arch == 'x86_64':
+ return ['-m64']
+ elif self.arch == 'x86_mscoff':
+ return ['-m32mscoff']
+ return ['-m32']
+ return []
+
+ def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ return self._get_crt_args(crt_val, buildtype)
+
+ def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:
+ return self._translate_args_to_nongnu(args)
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return dmd_optimization_args[optimization_level]
+
+ def can_linker_accept_rsp(self) -> bool:
+ return False
+
+ def get_linker_always_args(self) -> T.List[str]:
+ args = super().get_linker_always_args()
+ if self.info.is_windows():
+ return args
+ return args + ['-defaultlib=phobos2', '-debuglib=phobos2']
+
+ def get_disable_assert_args(self) -> T.List[str]:
+ return ['-release']
+
+ def rsp_file_syntax(self) -> RSPFileSyntax:
+ return RSPFileSyntax.MSVC
diff --git a/meson/mesonbuild/compilers/detect.py b/meson/mesonbuild/compilers/detect.py
new file mode 100644
index 000000000..22cf43b6d
--- /dev/null
+++ b/meson/mesonbuild/compilers/detect.py
@@ -0,0 +1,1219 @@
+# Copyright 2012-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..mesonlib import (
+ MachineChoice, MesonException, EnvironmentException,
+ search_version, is_windows, Popen_safe, windows_proof_rm,
+)
+from ..envconfig import BinaryTable
+from .. import mlog
+
+from ..linkers import (
+ guess_win_linker,
+ guess_nix_linker,
+ AIXArLinker,
+ ArLinker,
+ ArmarLinker,
+ ArmClangDynamicLinker,
+ ArmDynamicLinker,
+ CcrxLinker,
+ CcrxDynamicLinker,
+ CompCertLinker,
+ CompCertDynamicLinker,
+ C2000Linker,
+ C2000DynamicLinker,
+ DLinker,
+ NvidiaHPC_DynamicLinker,
+ PGIDynamicLinker,
+ PGIStaticLinker,
+ StaticLinker,
+ Xc16Linker,
+ Xc16DynamicLinker,
+ XilinkDynamicLinker,
+ CudaLinker,
+ IntelVisualStudioLinker,
+ VisualStudioLinker,
+ VisualStudioLikeLinkerMixin,
+ WASMDynamicLinker,
+)
+from .compilers import Compiler
+from .c import (
+ CCompiler,
+ AppleClangCCompiler,
+ ArmCCompiler,
+ ArmclangCCompiler,
+ ClangCCompiler,
+ ClangClCCompiler,
+ GnuCCompiler,
+ ElbrusCCompiler,
+ EmscriptenCCompiler,
+ IntelCCompiler,
+ IntelClCCompiler,
+ NvidiaHPC_CCompiler,
+ PGICCompiler,
+ CcrxCCompiler,
+ Xc16CCompiler,
+ CompCertCCompiler,
+ C2000CCompiler,
+ VisualStudioCCompiler,
+)
+from .cpp import (
+ CPPCompiler,
+ AppleClangCPPCompiler,
+ ArmCPPCompiler,
+ ArmclangCPPCompiler,
+ ClangCPPCompiler,
+ ClangClCPPCompiler,
+ GnuCPPCompiler,
+ ElbrusCPPCompiler,
+ EmscriptenCPPCompiler,
+ IntelCPPCompiler,
+ IntelClCPPCompiler,
+ NvidiaHPC_CPPCompiler,
+ PGICPPCompiler,
+ CcrxCPPCompiler,
+ C2000CPPCompiler,
+ VisualStudioCPPCompiler,
+)
+from .cs import MonoCompiler, VisualStudioCsCompiler
+from .d import (
+ DCompiler,
+ DmdDCompiler,
+ GnuDCompiler,
+ LLVMDCompiler,
+)
+from .cuda import CudaCompiler
+from .fortran import (
+ FortranCompiler,
+ G95FortranCompiler,
+ GnuFortranCompiler,
+ ElbrusFortranCompiler,
+ FlangFortranCompiler,
+ IntelFortranCompiler,
+ IntelClFortranCompiler,
+ NAGFortranCompiler,
+ Open64FortranCompiler,
+ PathScaleFortranCompiler,
+ NvidiaHPC_FortranCompiler,
+ PGIFortranCompiler,
+ SunFortranCompiler,
+)
+from .java import JavaCompiler
+from .objc import (
+ ObjCCompiler,
+ AppleClangObjCCompiler,
+ ClangObjCCompiler,
+ GnuObjCCompiler,
+)
+from .objcpp import (
+ ObjCPPCompiler,
+ AppleClangObjCPPCompiler,
+ ClangObjCPPCompiler,
+ GnuObjCPPCompiler,
+)
+from .cython import CythonCompiler
+from .rust import RustCompiler
+from .swift import SwiftCompiler
+from .vala import ValaCompiler
+from .mixins.visualstudio import VisualStudioLikeCompiler
+from .mixins.gnu import GnuCompiler
+from .mixins.clang import ClangCompiler
+
+import subprocess
+import platform
+import re
+import shutil
+import tempfile
+import os
+import typing as T
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+ from ..programs import ExternalProgram
+ from .compilers import CompilerType
+
+
+
+# Default compilers and linkers
+# =============================
+
+defaults: T.Dict[str, T.List[str]] = {}
+
+# List of potential compilers.
+if is_windows():
+ # The Intel C and C++ compilers are icl on Windows, but icc and icpc elsewhere.
+ # Search for icl before cl, since Intel "helpfully" provides a
+ # cl.exe that returns *exactly the same thing* that Microsoft's
+ # cl.exe does, and if icl is present, it's almost certainly what
+ # you want.
+ defaults['c'] = ['icl', 'cl', 'cc', 'gcc', 'clang', 'clang-cl', 'pgcc']
+ # There is currently no pgc++ for Windows, only for Mac and Linux.
+ defaults['cpp'] = ['icl', 'cl', 'c++', 'g++', 'clang++', 'clang-cl']
+ defaults['fortran'] = ['ifort', 'gfortran', 'flang', 'pgfortran', 'g95']
+ # Clang and clang++ are valid, but currently unsupported.
+ defaults['objc'] = ['cc', 'gcc']
+ defaults['objcpp'] = ['c++', 'g++']
+ defaults['cs'] = ['csc', 'mcs']
+else:
+ if platform.machine().lower() == 'e2k':
+ # There are no objc or objc++ compilers for Elbrus,
+ # and there's no clang that can build binaries for the host.
+ defaults['c'] = ['cc', 'gcc', 'lcc']
+ defaults['cpp'] = ['c++', 'g++', 'l++']
+ defaults['objc'] = []
+ defaults['objcpp'] = []
+ else:
+ defaults['c'] = ['cc', 'gcc', 'clang', 'nvc', 'pgcc', 'icc']
+ defaults['cpp'] = ['c++', 'g++', 'clang++', 'nvc++', 'pgc++', 'icpc']
+ defaults['objc'] = ['cc', 'gcc', 'clang']
+ defaults['objcpp'] = ['c++', 'g++', 'clang++']
+ defaults['fortran'] = ['gfortran', 'flang', 'nvfortran', 'pgfortran', 'ifort', 'g95']
+ defaults['cs'] = ['mcs', 'csc']
+defaults['d'] = ['ldc2', 'ldc', 'gdc', 'dmd']
+defaults['java'] = ['javac']
+defaults['cuda'] = ['nvcc']
+defaults['rust'] = ['rustc']
+defaults['swift'] = ['swiftc']
+defaults['vala'] = ['valac']
+defaults['cython'] = ['cython']
+defaults['static_linker'] = ['ar', 'gar']
+defaults['strip'] = ['strip']
+defaults['vs_static_linker'] = ['lib']
+defaults['clang_cl_static_linker'] = ['llvm-lib']
+defaults['cuda_static_linker'] = ['nvlink']
+defaults['gcc_static_linker'] = ['gcc-ar']
+defaults['clang_static_linker'] = ['llvm-ar']
+
+
+def compiler_from_language(env: 'Environment', lang: str, for_machine: MachineChoice) -> T.Optional[Compiler]:
+ lang_map: T.Dict[str, T.Callable[['Environment', MachineChoice], Compiler]] = {
+ 'c': detect_c_compiler,
+ 'cpp': detect_cpp_compiler,
+ 'objc': detect_objc_compiler,
+ 'cuda': detect_cuda_compiler,
+ 'objcpp': detect_objcpp_compiler,
+ 'java': detect_java_compiler,
+ 'cs': detect_cs_compiler,
+ 'vala': detect_vala_compiler,
+ 'd': detect_d_compiler,
+ 'rust': detect_rust_compiler,
+ 'fortran': detect_fortran_compiler,
+ 'swift': detect_swift_compiler,
+ 'cython': detect_cython_compiler,
+ }
+ return lang_map[lang](env, for_machine) if lang in lang_map else None
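+
+# Illustrative usage (not part of the original code): given an Environment,
+# compiler_from_language(env, 'c', MachineChoice.HOST) dispatches to
+# detect_c_compiler, while an unsupported language simply returns None.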
+
+def detect_compiler_for(env: 'Environment', lang: str, for_machine: MachineChoice)-> T.Optional[Compiler]:
+ comp = compiler_from_language(env, lang, for_machine)
+ if comp is not None:
+ assert comp.for_machine == for_machine
+ env.coredata.process_new_compiler(lang, comp, env)
+ return comp
+
+
+# Helpers
+# =======
+
+def _get_compilers(env: 'Environment', lang: str, for_machine: MachineChoice) -> T.Tuple[T.List[T.List[str]], T.List[str], T.Optional['ExternalProgram']]:
+ '''
+ The list of compilers is detected in the exact same way for
+ C, C++, ObjC, ObjC++, Fortran and C#, so it is consolidated here.
+ '''
+ value = env.lookup_binary_entry(for_machine, lang)
+ if value is not None:
+ comp, ccache = BinaryTable.parse_entry(value)
+ # Return value has to be a list of compiler 'choices'
+ compilers = [comp]
+ else:
+ if not env.machines.matches_build_machine(for_machine):
+ raise EnvironmentException(f'{lang!r} compiler binary not defined in cross or native file')
+ compilers = [[x] for x in defaults[lang]]
+ ccache = BinaryTable.detect_ccache()
+
+ if env.machines.matches_build_machine(for_machine):
+ exe_wrap: T.Optional[ExternalProgram] = None
+ else:
+ exe_wrap = env.get_exe_wrapper()
+
+ return compilers, ccache, exe_wrap
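+
+# For example (illustrative): on a native build with no binary entry set,
+# _get_compilers(env, 'c', MachineChoice.HOST) would return something like
+# ([['cc'], ['gcc'], ...], ['ccache'] or [], None) -- the candidate argv
+# lists, the ccache wrapper (empty when ccache is absent), and no exe wrapper.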
+
+def _handle_exceptions(
+ exceptions: T.Mapping[str, T.Union[Exception, str]],
+ binaries: T.List[T.List[str]],
+ bintype: str = 'compiler'
+ ) -> T.NoReturn:
+ errmsg = f'Unknown {bintype}(s): {binaries}'
+ if exceptions:
+ errmsg += '\nThe following exception(s) were encountered:'
+ for c, e in exceptions.items():
+ errmsg += f'\nRunning "{c}" gave "{e}"'
+ raise EnvironmentException(errmsg)
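+
+# The resulting message looks roughly like this (illustrative example):
+#   Unknown compiler(s): [['gcc'], ['clang']]
+#   The following exception(s) were encountered:
+#   Running "gcc --version" gave "[Errno 2] No such file or directory"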
+
+
+# Linker specific
+# ===============
+
+def detect_static_linker(env: 'Environment', compiler: Compiler) -> StaticLinker:
+ linker = env.lookup_binary_entry(compiler.for_machine, 'ar')
+ if linker is not None:
+ linkers = [linker]
+ else:
+ default_linkers = [[l] for l in defaults['static_linker']]
+ if isinstance(compiler, CudaCompiler):
+ linkers = [defaults['cuda_static_linker']] + default_linkers
+ elif isinstance(compiler, VisualStudioLikeCompiler):
+ linkers = [defaults['vs_static_linker'], defaults['clang_cl_static_linker']]
+ elif isinstance(compiler, GnuCompiler):
+ # Use gcc-ar if available; needed for LTO
+ linkers = [defaults['gcc_static_linker']] + default_linkers
+ elif isinstance(compiler, ClangCompiler):
+ # Use llvm-ar if available; needed for LTO
+ linkers = [defaults['clang_static_linker']] + default_linkers
+ elif isinstance(compiler, DCompiler):
+ # Prefer static linkers over linkers used by D compilers
+ if is_windows():
+ linkers = [defaults['vs_static_linker'], defaults['clang_cl_static_linker'], compiler.get_linker_exelist()]
+ else:
+ linkers = default_linkers
+ elif isinstance(compiler, IntelClCCompiler):
+ # Intel has its own linker that acts like Microsoft's lib
+ linkers = [['xilib']]
+ elif isinstance(compiler, (PGICCompiler, PGIFortranCompiler)) and is_windows():
+ linkers = [['ar']] # For PGI on Windows, "ar" is just a wrapper calling link/lib.
+ else:
+ linkers = default_linkers
+ popen_exceptions = {}
+ for linker in linkers:
+ if not {'lib', 'lib.exe', 'llvm-lib', 'llvm-lib.exe', 'xilib', 'xilib.exe'}.isdisjoint(linker):
+ arg = '/?'
+ elif not {'ar2000', 'ar2000.exe'}.isdisjoint(linker):
+ arg = '?'
+ else:
+ arg = '--version'
+ try:
+ p, out, err = Popen_safe(linker + [arg])
+ except OSError as e:
+ popen_exceptions[' '.join(linker + [arg])] = e
+ continue
+ if "xilib: executing 'lib'" in err:
+ return IntelVisualStudioLinker(linker, getattr(compiler, 'machine', None))
+ if '/OUT:' in out.upper() or '/OUT:' in err.upper():
+ return VisualStudioLinker(linker, getattr(compiler, 'machine', None))
+ if 'ar-Error-Unknown switch: --version' in err:
+ return PGIStaticLinker(linker)
+ if p.returncode == 0 and ('armar' in linker or 'armar.exe' in linker):
+ return ArmarLinker(linker)
+ if 'DMD32 D Compiler' in out or 'DMD64 D Compiler' in out:
+ assert isinstance(compiler, DCompiler)
+ return DLinker(linker, compiler.arch)
+ if 'LDC - the LLVM D compiler' in out:
+ assert isinstance(compiler, DCompiler)
+ return DLinker(linker, compiler.arch, rsp_syntax=compiler.rsp_file_syntax())
+ if 'GDC' in out and ' based on D ' in out:
+ assert isinstance(compiler, DCompiler)
+ return DLinker(linker, compiler.arch)
+ if err.startswith('Renesas') and ('rlink' in linker or 'rlink.exe' in linker):
+ return CcrxLinker(linker)
+ if out.startswith('GNU ar') and ('xc16-ar' in linker or 'xc16-ar.exe' in linker):
+ return Xc16Linker(linker)
+ if out.startswith('TMS320C2000') and ('ar2000' in linker or 'ar2000.exe' in linker):
+ return C2000Linker(linker)
+ if out.startswith('The CompCert'):
+ return CompCertLinker(linker)
+ if p.returncode == 0:
+ return ArLinker(linker)
+ if p.returncode == 1 and err.startswith('usage'): # OSX
+ return ArLinker(linker)
+ if p.returncode == 1 and err.startswith('Usage'): # AIX
+ return AIXArLinker(linker)
+ if p.returncode == 1 and err.startswith('ar: bad option: --'): # Solaris
+ return ArLinker(linker)
+ _handle_exceptions(popen_exceptions, linkers, 'linker')
+
+
+
+# Compilers
+# =========
+
+
+def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: MachineChoice, *, override_compiler: T.Optional[T.List[str]] = None) -> Compiler:
+ """Shared implementation for finding the C or C++ compiler to use.
+
+ The override_compiler option is provided to allow compilers which use
+ the C compiler (usually GCC or Clang) as their shared linker to find
+ the linker they need.
+ """
+ popen_exceptions: T.Dict[str, T.Union[Exception, str]] = {}
+ compilers, ccache, exe_wrap = _get_compilers(env, lang, for_machine)
+ if override_compiler is not None:
+ compilers = [override_compiler]
+ is_cross = env.is_cross_build(for_machine)
+ info = env.machines[for_machine]
+ cls: T.Union[T.Type[CCompiler], T.Type[CPPCompiler]]
+
+ for compiler in compilers:
+ if isinstance(compiler, str):
+ compiler = [compiler]
+ compiler_name = os.path.basename(compiler[0])
+
+ if any(os.path.basename(x) in {'cl', 'cl.exe', 'clang-cl', 'clang-cl.exe'} for x in compiler):
+ # Watcom C provides its own cl.exe clone that mimics an older
+ # version of Microsoft's compiler. Since Watcom's cl.exe is
+ # just a wrapper, we skip using it if we detect its presence
+ # so as not to confuse Meson when configuring for MSVC.
+ #
+ # Additionally, the help text of Watcom's cl.exe is paged, and
+ # the binary will not exit without human intervention, so in
+ # practice Meson would block forever waiting for Watcom's
+ # cl.exe to exit.
+ if 'WATCOM' in os.environ:
+ def sanitize(p: str) -> str:
+ return os.path.normcase(os.path.abspath(p))
+
+ watcom_cls = [sanitize(os.path.join(os.environ['WATCOM'], 'BINNT', 'cl')),
+ sanitize(os.path.join(os.environ['WATCOM'], 'BINNT', 'cl.exe'))]
+ found_cl = sanitize(shutil.which('cl'))
+ if found_cl in watcom_cls:
+ continue
+ arg = '/?'
+ elif 'armcc' in compiler_name:
+ arg = '--vsn'
+ elif 'ccrx' in compiler_name:
+ arg = '-v'
+ elif 'xc16' in compiler_name:
+ arg = '--version'
+ elif 'ccomp' in compiler_name:
+ arg = '-version'
+ elif 'cl2000' in compiler_name:
+ arg = '-version'
+ elif compiler_name in {'icl', 'icl.exe'}:
+ # if you pass anything to icl you get stuck in a pager
+ arg = ''
+ else:
+ arg = '--version'
+
+ try:
+ p, out, err = Popen_safe(compiler + [arg])
+ except OSError as e:
+ popen_exceptions[' '.join(compiler + [arg])] = e
+ continue
+
+ if 'ccrx' in compiler_name:
+ out = err
+
+ full_version = out.split('\n', 1)[0]
+ version = search_version(out)
+
+ guess_gcc_or_lcc: T.Optional[str] = None
+ if 'Free Software Foundation' in out or 'xt-' in out:
+ guess_gcc_or_lcc = 'gcc'
+ if 'e2k' in out and 'lcc' in out:
+ guess_gcc_or_lcc = 'lcc'
+ if 'Microchip Technology' in out:
+ # this output has "Free Software Foundation" in its version
+ guess_gcc_or_lcc = None
+
+ if guess_gcc_or_lcc:
+ defines = _get_gnu_compiler_defines(compiler)
+ if not defines:
+ popen_exceptions[' '.join(compiler)] = 'no pre-processor defines'
+ continue
+
+ if guess_gcc_or_lcc == 'lcc':
+ version = _get_lcc_version_from_defines(defines)
+ cls = ElbrusCCompiler if lang == 'c' else ElbrusCPPCompiler
+ else:
+ version = _get_gnu_version_from_defines(defines)
+ cls = GnuCCompiler if lang == 'c' else GnuCPPCompiler
+
+ linker = guess_nix_linker(env, compiler, cls, for_machine)
+
+ return cls(
+ ccache + compiler, version, for_machine, is_cross,
+ info, exe_wrap, defines=defines, full_version=full_version,
+ linker=linker)
+
+ if 'Emscripten' in out:
+ cls = EmscriptenCCompiler if lang == 'c' else EmscriptenCPPCompiler
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+
+ # emcc requires a file input in order to pass arguments to the
+ # linker. It'll exit with an error code, but still print the
+ # linker version. Old emcc versions ignore -Wl,--version completely,
+ # however. We'll report "unknown version" in that case.
+ with tempfile.NamedTemporaryFile(suffix='.c') as f:
+ cmd = compiler + [cls.LINKER_PREFIX + "--version", f.name]
+ _, o, _ = Popen_safe(cmd)
+
+ linker = WASMDynamicLinker(
+ compiler, for_machine, cls.LINKER_PREFIX,
+ [], version=search_version(o))
+ return cls(
+ ccache + compiler, version, for_machine, is_cross, info,
+ exe_wrap, linker=linker, full_version=full_version)
+
+ if 'armclang' in out:
+ # The compiler version is not present in the first line of output;
+ # it is in the second line, which starts with 'Component:'.
+ # We search the whole output for 'Component' rather than just the
+ # second line, since we cannot be sure about the output format
+ # of future versions.
+ arm_ver_match = re.search('.*Component.*', out)
+ if arm_ver_match is None:
+ popen_exceptions[' '.join(compiler)] = 'version string not found'
+ continue
+ arm_ver_str = arm_ver_match.group(0)
+ # Override previous values
+ version = search_version(arm_ver_str)
+ full_version = arm_ver_str
+ cls = ArmclangCCompiler if lang == 'c' else ArmclangCPPCompiler
+ linker = ArmClangDynamicLinker(for_machine, version=version)
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ return cls(
+ ccache + compiler, version, for_machine, is_cross, info,
+ exe_wrap, full_version=full_version, linker=linker)
+ if 'CL.EXE COMPATIBILITY' in out:
+ # if this is clang-cl masquerading as cl, detect it as cl, not
+ # clang
+ arg = '--version'
+ try:
+ p, out, err = Popen_safe(compiler + [arg])
+ except OSError as e:
+ popen_exceptions[' '.join(compiler + [arg])] = e
+ continue
+ version = search_version(out)
+ match = re.search('^Target: (.*?)-', out, re.MULTILINE)
+ if match:
+ target = match.group(1)
+ else:
+ target = 'unknown target'
+ cls = ClangClCCompiler if lang == 'c' else ClangClCPPCompiler
+ linker = guess_win_linker(env, ['lld-link'], cls, for_machine)
+ return cls(
+ compiler, version, for_machine, is_cross, info, target,
+ exe_wrap, linker=linker)
+ if 'clang' in out or 'Clang' in out:
+ linker = None
+
+ defines = _get_clang_compiler_defines(compiler)
+
+ # Even if the for_machine is darwin, we could be using vanilla
+ # clang.
+ if 'Apple' in out:
+ cls = AppleClangCCompiler if lang == 'c' else AppleClangCPPCompiler
+ else:
+ cls = ClangCCompiler if lang == 'c' else ClangCPPCompiler
+
+ if 'windows' in out or env.machines[for_machine].is_windows():
+ # If we're in a MINGW context this actually will use a gnu
+ # style ld, but for clang on "real" windows we'll use
+ # either link.exe or lld-link.exe
+ try:
+ linker = guess_win_linker(env, compiler, cls, for_machine, invoked_directly=False)
+ except MesonException:
+ pass
+ if linker is None:
+ linker = guess_nix_linker(env, compiler, cls, for_machine)
+
+ return cls(
+ ccache + compiler, version, for_machine, is_cross, info,
+ exe_wrap, defines=defines, full_version=full_version, linker=linker)
+
+ if 'Intel(R) C++ Intel(R)' in err:
+ version = search_version(err)
+ target = 'x86' if 'IA-32' in err else 'x86_64'
+ cls = IntelClCCompiler if lang == 'c' else IntelClCPPCompiler
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ linker = XilinkDynamicLinker(for_machine, [], version=version)
+ return cls(
+ compiler, version, for_machine, is_cross, info, target,
+ exe_wrap, linker=linker)
+ if 'Microsoft' in out or 'Microsoft' in err:
+ # Latest versions of Visual Studio print version
+ # number to stderr but earlier ones print version
+ # on stdout. Why? Lord only knows.
+ # Check both outputs to figure out version.
+ for lookat in [err, out]:
+ version = search_version(lookat)
+ if version != 'unknown version':
+ break
+ else:
+ raise EnvironmentException(f'Failed to detect MSVC compiler version: stderr was\n{err!r}')
+ cl_signature = lookat.split('\n')[0]
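+ # A hypothetical banner such as 'Microsoft (R) C/C++ Optimizing Compiler
+ # Version 19.29.30133 for x64' would yield target 'x64' below.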
+ match = re.search(r'.*(x86|x64|ARM|ARM64)([^_A-Za-z0-9]|$)', cl_signature)
+ if match:
+ target = match.group(1)
+ else:
+ m = f'Failed to detect MSVC compiler target architecture: \'cl /?\' output is\n{cl_signature}'
+ raise EnvironmentException(m)
+ cls = VisualStudioCCompiler if lang == 'c' else VisualStudioCPPCompiler
+ linker = guess_win_linker(env, ['link'], cls, for_machine)
+ return cls(
+ compiler, version, for_machine, is_cross, info, target,
+ exe_wrap, full_version=cl_signature, linker=linker)
+ if 'PGI Compilers' in out:
+ cls = PGICCompiler if lang == 'c' else PGICPPCompiler
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ linker = PGIDynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version)
+ return cls(
+ ccache + compiler, version, for_machine, is_cross,
+ info, exe_wrap, linker=linker)
+ if 'NVIDIA Compilers and Tools' in out:
+ cls = NvidiaHPC_CCompiler if lang == 'c' else NvidiaHPC_CPPCompiler
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ linker = NvidiaHPC_DynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version)
+ return cls(
+ ccache + compiler, version, for_machine, is_cross,
+ info, exe_wrap, linker=linker)
+ if '(ICC)' in out:
+ cls = IntelCCompiler if lang == 'c' else IntelCPPCompiler
+ l = guess_nix_linker(env, compiler, cls, for_machine)
+ return cls(
+ ccache + compiler, version, for_machine, is_cross, info,
+ exe_wrap, full_version=full_version, linker=l)
+ if 'ARM' in out:
+ cls = ArmCCompiler if lang == 'c' else ArmCPPCompiler
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ linker = ArmDynamicLinker(for_machine, version=version)
+ return cls(
+ ccache + compiler, version, for_machine, is_cross,
+ info, exe_wrap, full_version=full_version, linker=linker)
+ if 'RX Family' in out:
+ cls = CcrxCCompiler if lang == 'c' else CcrxCPPCompiler
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ linker = CcrxDynamicLinker(for_machine, version=version)
+ return cls(
+ ccache + compiler, version, for_machine, is_cross, info,
+ exe_wrap, full_version=full_version, linker=linker)
+
+ if 'Microchip Technology' in out:
+ cls = Xc16CCompiler  # XC16 is C-only; there is no separate C++ class
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ linker = Xc16DynamicLinker(for_machine, version=version)
+ return cls(
+ ccache + compiler, version, for_machine, is_cross, info,
+ exe_wrap, full_version=full_version, linker=linker)
+
+ if 'CompCert' in out:
+ cls = CompCertCCompiler
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ linker = CompCertDynamicLinker(for_machine, version=version)
+ return cls(
+ ccache + compiler, version, for_machine, is_cross, info,
+ exe_wrap, full_version=full_version, linker=linker)
+
+ if 'TMS320C2000 C/C++' in out:
+ cls = C2000CCompiler if lang == 'c' else C2000CPPCompiler
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ linker = C2000DynamicLinker(compiler, for_machine, version=version)
+ return cls(
+ ccache + compiler, version, for_machine, is_cross, info,
+ exe_wrap, full_version=full_version, linker=linker)
+
+ _handle_exceptions(popen_exceptions, compilers)
+ raise EnvironmentException(f'Unknown compiler {compilers}')
+
+def detect_c_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+ return _detect_c_or_cpp_compiler(env, 'c', for_machine)
+
+def detect_cpp_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+ return _detect_c_or_cpp_compiler(env, 'cpp', for_machine)
+
+def detect_cuda_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+ popen_exceptions = {}
+ is_cross = env.is_cross_build(for_machine)
+ compilers, ccache, exe_wrap = _get_compilers(env, 'cuda', for_machine)
+ info = env.machines[for_machine]
+ for compiler in compilers:
+ arg = '--version'
+ try:
+ p, out, err = Popen_safe(compiler + [arg])
+ except OSError as e:
+ popen_exceptions[' '.join(compiler + [arg])] = e
+ continue
+ # Example nvcc printout:
+ #
+ # nvcc: NVIDIA (R) Cuda compiler driver
+ # Copyright (c) 2005-2018 NVIDIA Corporation
+ # Built on Sat_Aug_25_21:08:01_CDT_2018
+ # Cuda compilation tools, release 10.0, V10.0.130
+ #
+ # search_version() first finds the "10.0" after "release",
+ # rather than the more precise "10.0.130" after "V".
+ # The patch version number is occasionally important; for
+ # instance, on Linux,
+ # - CUDA Toolkit 8.0.44 requires NVIDIA Driver 367.48
+ # - CUDA Toolkit 8.0.61 requires NVIDIA Driver 375.26
+ # Luckily, the "V" also makes it very simple to extract
+ # the full version:
+ version = out.strip().split('V')[-1]
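+ # With the sample output above, this yields '10.0.130'.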
+ cpp_compiler = detect_cpp_compiler(env, for_machine)
+ cls = CudaCompiler
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ linker = CudaLinker(compiler, for_machine, CudaCompiler.LINKER_PREFIX, [], version=CudaLinker.parse_version())
+ return cls(ccache + compiler, version, for_machine, is_cross, exe_wrap, host_compiler=cpp_compiler, info=info, linker=linker)
+ raise EnvironmentException(f'Could not find suitable CUDA compiler: "{"; ".join([" ".join(c) for c in compilers])}"')
+
+def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+ popen_exceptions: T.Dict[str, T.Union[Exception, str]] = {}
+ compilers, ccache, exe_wrap = _get_compilers(env, 'fortran', for_machine)
+ is_cross = env.is_cross_build(for_machine)
+ info = env.machines[for_machine]
+ cls: T.Type[FortranCompiler]
+ for compiler in compilers:
+ for arg in ['--version', '-V']:
+ try:
+ p, out, err = Popen_safe(compiler + [arg])
+ except OSError as e:
+ popen_exceptions[' '.join(compiler + [arg])] = e
+ continue
+
+ version = search_version(out)
+ full_version = out.split('\n', 1)[0]
+
+ guess_gcc_or_lcc: T.Optional[str] = None
+ if 'GNU Fortran' in out:
+ guess_gcc_or_lcc = 'gcc'
+ if 'e2k' in out and 'lcc' in out:
+ guess_gcc_or_lcc = 'lcc'
+
+ if guess_gcc_or_lcc:
+ defines = _get_gnu_compiler_defines(compiler)
+ if not defines:
+ popen_exceptions[' '.join(compiler)] = 'no pre-processor defines'
+ continue
+ if guess_gcc_or_lcc == 'lcc':
+ version = _get_lcc_version_from_defines(defines)
+ cls = ElbrusFortranCompiler
+ else:
+ version = _get_gnu_version_from_defines(defines)
+ cls = GnuFortranCompiler
+ linker = guess_nix_linker(env, compiler, cls, for_machine)
+ return cls(
+ compiler, version, for_machine, is_cross, info,
+ exe_wrap, defines, full_version=full_version,
+ linker=linker)
+
+ if 'G95' in out:
+ cls = G95FortranCompiler
+ linker = guess_nix_linker(env, compiler, cls, for_machine)
+ return G95FortranCompiler(
+ compiler, version, for_machine, is_cross, info,
+ exe_wrap, full_version=full_version, linker=linker)
+
+ if 'Sun Fortran' in err:
+ version = search_version(err)
+ cls = SunFortranCompiler
+ linker = guess_nix_linker(env, compiler, cls, for_machine)
+ return SunFortranCompiler(
+ compiler, version, for_machine, is_cross, info,
+ exe_wrap, full_version=full_version, linker=linker)
+
+ if 'Intel(R) Visual Fortran' in err or 'Intel(R) Fortran' in err:
+ version = search_version(err)
+ target = 'x86' if 'IA-32' in err else 'x86_64'
+ cls = IntelClFortranCompiler
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ linker = XilinkDynamicLinker(for_machine, [], version=version)
+ return cls(
+ compiler, version, for_machine, is_cross, info,
+ target, exe_wrap, linker=linker)
+
+ if 'ifort (IFORT)' in out:
+ linker = guess_nix_linker(env, compiler, IntelFortranCompiler, for_machine)
+ return IntelFortranCompiler(
+ compiler, version, for_machine, is_cross, info,
+ exe_wrap, full_version=full_version, linker=linker)
+
+ if 'PathScale EKOPath(tm)' in err:
+ return PathScaleFortranCompiler(
+ compiler, version, for_machine, is_cross, info,
+ exe_wrap, full_version=full_version)
+
+ if 'PGI Compilers' in out:
+ cls = PGIFortranCompiler
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ linker = PGIDynamicLinker(compiler, for_machine,
+ cls.LINKER_PREFIX, [], version=version)
+ return cls(
+ compiler, version, for_machine, is_cross, info, exe_wrap,
+ full_version=full_version, linker=linker)
+
+ if 'NVIDIA Compilers and Tools' in out:
+ cls = NvidiaHPC_FortranCompiler
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ linker = PGIDynamicLinker(compiler, for_machine,
+ cls.LINKER_PREFIX, [], version=version)
+ return cls(
+ compiler, version, for_machine, is_cross, info, exe_wrap,
+ full_version=full_version, linker=linker)
+
+ if 'flang' in out or 'clang' in out:
+ linker = guess_nix_linker(env,
+ compiler, FlangFortranCompiler, for_machine)
+ return FlangFortranCompiler(
+ compiler, version, for_machine, is_cross, info,
+ exe_wrap, full_version=full_version, linker=linker)
+
+ if 'Open64 Compiler Suite' in err:
+ linker = guess_nix_linker(env,
+ compiler, Open64FortranCompiler, for_machine)
+ return Open64FortranCompiler(
+ compiler, version, for_machine, is_cross, info,
+ exe_wrap, full_version=full_version, linker=linker)
+
+ if 'NAG Fortran' in err:
+ linker = guess_nix_linker(env,
+ compiler, NAGFortranCompiler, for_machine)
+ return NAGFortranCompiler(
+ compiler, version, for_machine, is_cross, info,
+ exe_wrap, full_version=full_version, linker=linker)
+
+ _handle_exceptions(popen_exceptions, compilers)
+ raise EnvironmentException('Unreachable code (exception to make mypy happy)')
+
+def detect_objc_compiler(env: 'Environment', for_machine: MachineChoice) -> 'Compiler':
+ return _detect_objc_or_objcpp_compiler(env, for_machine, True)
+
+def detect_objcpp_compiler(env: 'Environment', for_machine: MachineChoice) -> 'Compiler':
+ return _detect_objc_or_objcpp_compiler(env, for_machine, False)
+
+def _detect_objc_or_objcpp_compiler(env: 'Environment', for_machine: MachineChoice, objc: bool) -> 'Compiler':
+ popen_exceptions: T.Dict[str, T.Union[Exception, str]] = {}
+ compilers, ccache, exe_wrap = _get_compilers(env, 'objc' if objc else 'objcpp', for_machine)
+ is_cross = env.is_cross_build(for_machine)
+ info = env.machines[for_machine]
+ comp: T.Union[T.Type[ObjCCompiler], T.Type[ObjCPPCompiler]]
+
+ for compiler in compilers:
+ arg = ['--version']
+ try:
+ p, out, err = Popen_safe(compiler + arg)
+ except OSError as e:
+ popen_exceptions[' '.join(compiler + arg)] = e
+ continue
+ version = search_version(out)
+ if 'Free Software Foundation' in out:
+ defines = _get_gnu_compiler_defines(compiler)
+ if not defines:
+ popen_exceptions[' '.join(compiler)] = 'no pre-processor defines'
+ continue
+ version = _get_gnu_version_from_defines(defines)
+ comp = GnuObjCCompiler if objc else GnuObjCPPCompiler
+ linker = guess_nix_linker(env, compiler, comp, for_machine)
+ return comp(
+ ccache + compiler, version, for_machine, is_cross, info,
+ exe_wrap, defines, linker=linker)
+ if 'clang' in out:
+ linker = None
+ defines = _get_clang_compiler_defines(compiler)
+ if not defines:
+ popen_exceptions[' '.join(compiler)] = 'no pre-processor defines'
+ continue
+ if 'Apple' in out:
+ comp = AppleClangObjCCompiler if objc else AppleClangObjCPPCompiler
+ else:
+ comp = ClangObjCCompiler if objc else ClangObjCPPCompiler
+ if 'windows' in out or env.machines[for_machine].is_windows():
+ # If we're in a MINGW context this actually will use a gnu style ld
+ try:
+ linker = guess_win_linker(env, compiler, comp, for_machine)
+ except MesonException:
+ pass
+
+ if not linker:
+ linker = guess_nix_linker(env, compiler, comp, for_machine)
+ return comp(
+ ccache + compiler, version, for_machine,
+ is_cross, info, exe_wrap, linker=linker, defines=defines)
+ _handle_exceptions(popen_exceptions, compilers)
+ raise EnvironmentException('Unreachable code (exception to make mypy happy)')
+
+def detect_java_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+ exelist = env.lookup_binary_entry(for_machine, 'java')
+ info = env.machines[for_machine]
+ if exelist is None:
+ # TODO support fallback
+ exelist = [defaults['java'][0]]
+
+ try:
+ p, out, err = Popen_safe(exelist + ['-version'])
+ except OSError:
+ raise EnvironmentException('Could not execute Java compiler "{}"'.format(' '.join(exelist)))
+ if 'javac' in out or 'javac' in err:
+ version = search_version(err if 'javac' in err else out)
+ if not version or version == 'unknown version':
+ parts = (err if 'javac' in err else out).split()
+ if len(parts) > 1:
+ version = parts[1]
+ comp_class = JavaCompiler
+ env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ return comp_class(exelist, version, for_machine, info)
+ raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
+
+def detect_cs_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+ compilers, ccache, exe_wrap = _get_compilers(env, 'cs', for_machine)
+ popen_exceptions = {}
+ info = env.machines[for_machine]
+ for comp in compilers:
+ try:
+ p, out, err = Popen_safe(comp + ['--version'])
+ except OSError as e:
+ popen_exceptions[' '.join(comp + ['--version'])] = e
+ continue
+
+ version = search_version(out)
+ cls: T.Union[T.Type[MonoCompiler], T.Type[VisualStudioCsCompiler]]
+ if 'Mono' in out:
+ cls = MonoCompiler
+ elif "Visual C#" in out:
+ cls = VisualStudioCsCompiler
+ else:
+ continue
+ env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ return cls(comp, version, for_machine, info)
+
+ _handle_exceptions(popen_exceptions, compilers)
+ raise EnvironmentException('Unreachable code (exception to make mypy happy)')
+
+def detect_cython_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+ """Search for a cython compiler."""
+ compilers, _, _ = _get_compilers(env, 'cython', for_machine)
+ is_cross = env.is_cross_build(for_machine)
+ info = env.machines[for_machine]
+
+ popen_exceptions: T.Dict[str, Exception] = {}
+ for comp in compilers:
+ try:
+ err = Popen_safe(comp + ['-V'])[2]
+ except OSError as e:
+ popen_exceptions[' '.join(comp + ['-V'])] = e
+ continue
+
+ version = search_version(err)
+ if 'Cython' in err:
+ comp_class = CythonCompiler
+ env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ return comp_class(comp, version, for_machine, info, is_cross=is_cross)
+ _handle_exceptions(popen_exceptions, compilers)
+ raise EnvironmentException('Unreachable code (exception to make mypy happy)')
+
+def detect_vala_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+ exelist = env.lookup_binary_entry(for_machine, 'vala')
+ is_cross = env.is_cross_build(for_machine)
+ info = env.machines[for_machine]
+ if exelist is None:
+ # TODO support fallback
+ exelist = [defaults['vala'][0]]
+
+ try:
+ p, out = Popen_safe(exelist + ['--version'])[0:2]
+ except OSError:
+ raise EnvironmentException('Could not execute Vala compiler "{}"'.format(' '.join(exelist)))
+ version = search_version(out)
+ if 'Vala' in out:
+ comp_class = ValaCompiler
+ env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ return comp_class(exelist, version, for_machine, is_cross, info)
+ raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
+
+def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> RustCompiler:
+ popen_exceptions = {} # type: T.Dict[str, Exception]
+ compilers, _, exe_wrap = _get_compilers(env, 'rust', for_machine)
+ is_cross = env.is_cross_build(for_machine)
+ info = env.machines[for_machine]
+
+ cc = detect_c_compiler(env, for_machine)
+ is_link_exe = isinstance(cc.linker, VisualStudioLikeLinkerMixin)
+ override = env.lookup_binary_entry(for_machine, 'rust_ld')
+
+ for compiler in compilers:
+ arg = ['--version']
+ try:
+ out = Popen_safe(compiler + arg)[1]
+ except OSError as e:
+ popen_exceptions[' '.join(compiler + arg)] = e
+ continue
+
+ version = search_version(out)
+
+ if 'rustc' in out:
+ # On Linux and mac rustc will invoke gcc (clang for mac,
+ # presumably) for dynamic linking, and it can do this on
+ # windows too. This means the easiest way to figure out what
+ # linker to use is to just get the value of the C compiler and
+ # use that as the basis of the rust linker. However, there are
+ # two things we need to change: first, if CC is not the
+ # default, use that; and second, add the necessary arguments
+ # to rustc to use -fuse-ld.
+
+ if any(a.startswith('linker=') for a in compiler):
+ mlog.warning(
+ 'Please do not put -C linker= in your compiler '
+ 'command, set rust_ld=command in your cross file '
+ 'or use the RUST_LD environment variable, otherwise meson '
+ 'will override your selection.')
+
+ if override is None:
+ extra_args: T.Dict[str, T.Union[str, bool]] = {}
+ always_args: T.List[str] = []
+ if is_link_exe:
+ compiler.extend(RustCompiler.use_linker_args(cc.linker.exelist[0]))
+ extra_args['direct'] = True
+ extra_args['machine'] = cc.linker.machine
+ else:
+ exelist = cc.linker.exelist + cc.linker.get_always_args()
+ if 'ccache' in exelist[0]:
+ del exelist[0]
+ c = exelist.pop(0)
+ compiler.extend(RustCompiler.use_linker_args(c))
+
+ # Also ensure that we pass any extra arguments to the linker
+ for l in exelist:
+ compiler.extend(['-C', f'link-arg={l}'])
+
+ # This trickery with type() gets us the class of the linker
+ # so we can initialize a new copy for the Rust Compiler
+ # TODO rewrite this without type: ignore
+ if is_link_exe:
+ linker = type(cc.linker)(for_machine, always_args, exelist=cc.linker.exelist, # type: ignore
+ version=cc.linker.version, **extra_args) # type: ignore
+ else:
+ linker = type(cc.linker)(compiler, for_machine, cc.LINKER_PREFIX,
+ always_args=always_args, version=cc.linker.version,
+ **extra_args) # type: ignore
+ elif 'link' in override[0]:
+ linker = guess_win_linker(env,
+ override, RustCompiler, for_machine, use_linker_prefix=False)
+ # rustc takes linker arguments without a prefix, and
+ # inserts the correct prefix itself.
+ assert isinstance(linker, VisualStudioLikeLinkerMixin)
+ linker.direct = True
+ compiler.extend(RustCompiler.use_linker_args(linker.exelist[0]))
+ else:
+ # On Linux and macOS rust will invoke the C compiler for
+ # linking; on Windows it will use lld-link or link.exe.
+ # We simply ask for the C compiler that corresponds to
+ # it, and use that.
+ cc = _detect_c_or_cpp_compiler(env, 'c', for_machine, override_compiler=override)
+ linker = cc.linker
+
+ # Of course, we're not going to use any of that, we just
+ # need it to get the proper arguments to pass to rustc
+ c = linker.exelist[1] if linker.exelist[0].endswith('ccache') else linker.exelist[0]
+ compiler.extend(RustCompiler.use_linker_args(c))
+
+ env.coredata.add_lang_args(RustCompiler.language, RustCompiler, for_machine, env)
+ return RustCompiler(
+ compiler, version, for_machine, is_cross, info, exe_wrap,
+ linker=linker)
+
+ _handle_exceptions(popen_exceptions, compilers)
+ raise EnvironmentException('Unreachable code (exception to make mypy happy)')
+
+def detect_d_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+ info = env.machines[for_machine]
+
+ # Detect the target architecture, required for proper architecture handling on Windows.
+ # The MSVC compiler is required for correct platform detection.
+ c_compiler = {'c': detect_c_compiler(env, for_machine)}
+ is_msvc = isinstance(c_compiler['c'], VisualStudioCCompiler)
+ if not is_msvc:
+ c_compiler = {}
+
+ # Import here to avoid circular imports
+ from ..environment import detect_cpu_family
+ arch = detect_cpu_family(c_compiler)
+ if is_msvc and arch == 'x86':
+ arch = 'x86_mscoff'
+
+ popen_exceptions = {}
+ is_cross = env.is_cross_build(for_machine)
+ compilers, ccache, exe_wrap = _get_compilers(env, 'd', for_machine)
+ for exelist in compilers:
+ # Search for a D compiler.
+ # We prefer LDC over GDC unless overridden with the DC
+ # environment variable, because LDC had a much more
+ # up-to-date language version at the time (2016).
+ if os.path.basename(exelist[-1]).startswith(('ldmd', 'gdmd')):
+ raise EnvironmentException(
+ f'Meson does not support {exelist[-1]} as it is only a DMD frontend for another compiler.'
+ 'Please provide a valid value for DC or unset it so that Meson can resolve the compiler by itself.')
+ try:
+ p, out = Popen_safe(exelist + ['--version'])[0:2]
+ except OSError as e:
+ popen_exceptions[' '.join(exelist + ['--version'])] = e
+ continue
+ version = search_version(out)
+ full_version = out.split('\n', 1)[0]
+
+ if 'LLVM D compiler' in out:
+ # LDC seems to require a file.
+ # We cannot use NamedTemporaryFile on Windows; it's documented
+ # not to work for our use case. So, just use mkstemp and only
+ # have one code path for simplicity.
+ o, f = tempfile.mkstemp('.d')
+ os.close(o)
+
+ try:
+ if info.is_windows() or info.is_cygwin():
+ objfile = os.path.basename(f)[:-1] + 'obj'
+ linker = guess_win_linker(env,
+ exelist,
+ LLVMDCompiler, for_machine,
+ use_linker_prefix=True, invoked_directly=False,
+ extra_args=[f])
+ else:
+ # LDC writes an object file to the current working directory.
+ # Clean it up.
+ objfile = os.path.basename(f)[:-1] + 'o'
+ linker = guess_nix_linker(env,
+ exelist, LLVMDCompiler, for_machine,
+ extra_args=[f])
+ finally:
+ windows_proof_rm(f)
+ windows_proof_rm(objfile)
+
+ return LLVMDCompiler(
+ exelist, version, for_machine, info, arch,
+ full_version=full_version, linker=linker, version_output=out)
+ elif 'gdc' in out:
+ linker = guess_nix_linker(env, exelist, GnuDCompiler, for_machine)
+ return GnuDCompiler(
+ exelist, version, for_machine, info, arch,
+ exe_wrapper=exe_wrap, is_cross=is_cross,
+ full_version=full_version, linker=linker)
+ elif 'The D Language Foundation' in out or 'Digital Mars' in out:
+ # DMD seems to require a file.
+ # We cannot use NamedTemporaryFile on Windows; it's documented
+ # not to work for our use case. So, just use mkstemp and only
+ # have one code path for simplicity.
+ o, f = tempfile.mkstemp('.d')
+ os.close(o)
+
+ # DMD has different detection logic for x86 and x86_64
+ arch_arg = '-m64' if arch == 'x86_64' else '-m32'
+
+ try:
+ if info.is_windows() or info.is_cygwin():
+ objfile = os.path.basename(f)[:-1] + 'obj'
+ linker = guess_win_linker(env,
+ exelist, DmdDCompiler, for_machine,
+ invoked_directly=False, extra_args=[f, arch_arg])
+ else:
+ objfile = os.path.basename(f)[:-1] + 'o'
+ linker = guess_nix_linker(env,
+ exelist, DmdDCompiler, for_machine,
+ extra_args=[f, arch_arg])
+ finally:
+ windows_proof_rm(f)
+ windows_proof_rm(objfile)
+
+ return DmdDCompiler(
+ exelist, version, for_machine, info, arch,
+ full_version=full_version, linker=linker)
+ raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
+
+ _handle_exceptions(popen_exceptions, compilers)
+ raise EnvironmentException('Unreachable code (exception to make mypy happy)')
+
+def detect_swift_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+ exelist = env.lookup_binary_entry(for_machine, 'swift')
+ is_cross = env.is_cross_build(for_machine)
+ info = env.machines[for_machine]
+ if exelist is None:
+ # TODO support fallback
+ exelist = [defaults['swift'][0]]
+
+ try:
+ p, _, err = Popen_safe(exelist + ['-v'])
+ except OSError:
+ raise EnvironmentException('Could not execute Swift compiler "{}"'.format(' '.join(exelist)))
+ version = search_version(err)
+ if 'Swift' in err:
+ # As of 5.0.1, swiftc *requires* a file to check the linker:
+ with tempfile.NamedTemporaryFile(suffix='.swift') as f:
+ linker = guess_nix_linker(env,
+ exelist, SwiftCompiler, for_machine,
+ extra_args=[f.name])
+ return SwiftCompiler(
+ exelist, version, for_machine, is_cross, info, linker=linker)
+
+ raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
+
+
+# GNU/Clang defines and version
+# =============================
+
+def _get_gnu_compiler_defines(compiler: T.List[str]) -> T.Dict[str, str]:
+ """
+ Detect GNU compiler platform type (Apple, MinGW, Unix)
+ """
+ # Arguments to output compiler pre-processor defines to stdout
+ # gcc, g++, and gfortran all support these arguments
+ args = compiler + ['-E', '-dM', '-']
+ p, output, error = Popen_safe(args, write='', stdin=subprocess.PIPE)
+ if p.returncode != 0:
+ raise EnvironmentException('Unable to detect GNU compiler type:\n' + output + error)
+ # Parse several lines of the type:
+ # `#define ___SOME_DEF some_value`
+ # and extract `___SOME_DEF`
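+ # e.g. `#define __GNUC__ 9` yields defines['__GNUC__'] = '9', and a
+ # bare `#define __unix__` yields defines['__unix__'] = ''.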
+ defines: T.Dict[str, str] = {}
+ for line in output.split('\n'):
+ if not line:
+ continue
+ d, *rest = line.split(' ', 2)
+ if d != '#define':
+ continue
+ if len(rest) == 1:
+ defines[rest[0]] = ''
+ if len(rest) == 2:
+ defines[rest[0]] = rest[1]
+ return defines
+
+def _get_clang_compiler_defines(compiler: T.List[str]) -> T.Dict[str, str]:
+ """
+ Get the list of Clang pre-processor defines
+ """
+ args = compiler + ['-E', '-dM', '-']
+ p, output, error = Popen_safe(args, write='', stdin=subprocess.PIPE)
+ if p.returncode != 0:
+ raise EnvironmentException('Unable to get clang pre-processor defines:\n' + output + error)
+ defines: T.Dict[str, str] = {}
+ for line in output.split('\n'):
+ if not line:
+ continue
+ d, *rest = line.split(' ', 2)
+ if d != '#define':
+ continue
+ if len(rest) == 1:
+ defines[rest[0]] = ''
+ if len(rest) == 2:
+ defines[rest[0]] = rest[1]
+ return defines
+
+def _get_gnu_version_from_defines(defines: T.Dict[str, str]) -> str:
+ dot = '.'
+ major = defines.get('__GNUC__', '0')
+ minor = defines.get('__GNUC_MINOR__', '0')
+ patch = defines.get('__GNUC_PATCHLEVEL__', '0')
+ return dot.join((major, minor, patch))
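+
+# For example, GCC 9.3.0 defines __GNUC__=9, __GNUC_MINOR__=3 and
+# __GNUC_PATCHLEVEL__=0, which this function reassembles into '9.3.0'.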
+
+def _get_lcc_version_from_defines(defines: T.Dict[str, str]) -> str:
+ dot = '.'
+ generation_and_major = defines.get('__LCC__', '100')
+ generation = generation_and_major[:1]
+ major = generation_and_major[1:]
+ minor = defines.get('__LCC_MINOR__', '0')
+ return dot.join((generation, major, minor))
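+
+# Illustrative example: an lcc build defining __LCC__='125' and
+# __LCC_MINOR__='16' splits into generation '1' and major '25',
+# giving '1.25.16'.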
diff --git a/meson/mesonbuild/compilers/fortran.py b/meson/mesonbuild/compilers/fortran.py
new file mode 100644
index 000000000..e15ac569d
--- /dev/null
+++ b/meson/mesonbuild/compilers/fortran.py
@@ -0,0 +1,504 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pathlib import Path
+import typing as T
+import subprocess, os
+
+from .. import coredata
+from .compilers import (
+ clike_debug_args,
+ Compiler,
+)
+from .mixins.clike import CLikeCompiler
+from .mixins.gnu import (
+ GnuCompiler, gnulike_buildtype_args, gnu_optimization_args,
+)
+from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler
+from .mixins.clang import ClangCompiler
+from .mixins.elbrus import ElbrusCompiler
+from .mixins.pgi import PGICompiler
+
+from mesonbuild.mesonlib import (
+ version_compare, EnvironmentException, MesonException, MachineChoice,
+ LibType, OptionKey,
+)
+
+if T.TYPE_CHECKING:
+ from ..coredata import KeyedOptionDictType
+ from ..dependencies import Dependency
+ from ..envconfig import MachineInfo
+ from ..environment import Environment
+ from ..linkers import DynamicLinker
+ from ..programs import ExternalProgram
+
+
+class FortranCompiler(CLikeCompiler, Compiler):
+
+ language = 'fortran'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ Compiler.__init__(self, exelist, version, for_machine, info,
+ is_cross=is_cross, full_version=full_version, linker=linker)
+ CLikeCompiler.__init__(self, exe_wrapper)
+
+ def has_function(self, funcname: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ raise MesonException('Fortran does not have "has_function" capability.\n'
+ 'It is better to test if a Fortran capability is working like:\n\n'
+ "meson.get_compiler('fortran').links('block; end block; end program')\n\n"
+ 'That example checks whether the compiler supports the Fortran 2008 block construct.')
+
+ def sanity_check(self, work_dir_: str, environment: 'Environment') -> None:
+ work_dir = Path(work_dir_)
+ source_name = work_dir / 'sanitycheckf.f90'
+ binary_name = work_dir / 'sanitycheckf'
+ if binary_name.is_file():
+ binary_name.unlink()
+
+ source_name.write_text('program main; print *, "Fortran compilation is working."; end program', encoding='utf-8')
+
+ extra_flags: T.List[str] = []
+ extra_flags += environment.coredata.get_external_args(self.for_machine, self.language)
+ extra_flags += environment.coredata.get_external_link_args(self.for_machine, self.language)
+ extra_flags += self.get_always_args()
+ # %% build the test executable "sanitycheckf"
+ # cwd=work_dir is necessary on Windows, especially for Intel compilers, to avoid the error "cannot write on sanitycheckf.obj".
+ # This is a defect in how Windows handles files, combined with ifort's object-file-writing behavior vis-a-vis a concurrent ProcessPoolExecutor.
+ # This simple workaround solves the issue.
+ # FIXME: cwd=str(work_dir) is for Python 3.5 on Windows; when 3.5 is deprecated, this can become cwd=work_dir
+ returncode = subprocess.run(self.exelist + extra_flags + [str(source_name), '-o', str(binary_name)],
+ cwd=str(work_dir)).returncode
+ if returncode != 0:
+ raise EnvironmentException('Compiler %s cannot compile programs.' % self.name_string())
+ if self.is_cross:
+ if self.exe_wrapper is None:
+ # Can't check if the binaries run so we have to assume they do
+ return
+ cmdlist = self.exe_wrapper.get_command() + [str(binary_name)]
+ else:
+ cmdlist = [str(binary_name)]
+ # %% Run the test executable
+ try:
+ returncode = subprocess.run(cmdlist, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL).returncode
+ if returncode != 0:
+ raise EnvironmentException('Executables created by Fortran compiler %s are not runnable.' % self.name_string())
+ except OSError:
+ raise EnvironmentException('Executables created by Fortran compiler %s are not runnable.' % self.name_string())
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return gnulike_buildtype_args[buildtype]
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return gnu_optimization_args[optimization_level]
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return clike_debug_args[is_debug]
+
+ def get_preprocess_only_args(self) -> T.List[str]:
+ return ['-cpp'] + super().get_preprocess_only_args()
+
+ def get_module_incdir_args(self) -> T.Tuple[str, ...]:
+ return ('-I', )
+
+ def get_module_outdir_args(self, path: str) -> T.List[str]:
+ return ['-module', path]
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+ build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
+
+ def module_name_to_filename(self, module_name: str) -> str:
+ if '_' in module_name: # submodule
+ s = module_name.lower()
+ if self.id in ('gcc', 'intel', 'intel-cl'):
+ filename = s.replace('_', '@') + '.smod'
+ elif self.id in ('pgi', 'flang'):
+ filename = s.replace('_', '-') + '.mod'
+ else:
+ filename = s + '.mod'
+ else: # module
+ filename = module_name.lower() + '.mod'
+
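+ # Illustrative mappings: 'parent_child' -> 'parent@child.smod' for gcc,
+ # 'parent-child.mod' for flang, and plain 'mymod' -> 'mymod.mod'.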
+ return filename
+
+ def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
+ libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]:
+ code = 'stop; end program'
+ return self._find_library_impl(libname, env, extra_dirs, code, libtype)
+
+ def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+ return self._has_multi_arguments(args, env, 'stop; end program')
+
+ def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+ return self._has_multi_link_arguments(args, env, 'stop; end program')
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = super().get_options()
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts.update({
+ key: coredata.UserComboOption(
+ 'Fortran language standard to use',
+ ['none'],
+ 'none',
+ ),
+ })
+ return opts
+
+
+class GnuFortranCompiler(GnuCompiler, FortranCompiler):
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ FortranCompiler.__init__(self, exelist, version, for_machine,
+ is_cross, info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ GnuCompiler.__init__(self, defines)
+ default_warn_args = ['-Wall']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra', '-Wpedantic', '-fimplicit-none']}
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = FortranCompiler.get_options(self)
+ fortran_stds = ['legacy', 'f95', 'f2003']
+ if version_compare(self.version, '>=4.4.0'):
+ fortran_stds += ['f2008']
+ if version_compare(self.version, '>=8.0.0'):
+ fortran_stds += ['f2018']
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none'] + fortran_stds
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value != 'none':
+ args.append('-std=' + std.value)
+ return args
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ # Disabled until this is fixed:
+ # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=62162
+ # return ['-cpp', '-MD', '-MQ', outtarget]
+ return []
+
+ def get_module_outdir_args(self, path: str) -> T.List[str]:
+ return ['-J' + path]
+
+ def language_stdlib_only_link_flags(self) -> T.List[str]:
+ return ['-lgfortran', '-lm']
+
+ def has_header(self, hname: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None,
+ disable_cache: bool = False) -> T.Tuple[bool, bool]:
+ '''
+ Derived from mixins/clike.py:has_header, but without C-style usage of
+ __has_include which breaks with GCC-Fortran 10:
+ https://github.com/mesonbuild/meson/issues/7017
+ '''
+ code = f'{prefix}\n#include <{hname}>'
+ return self.compiles(code, env, extra_args=extra_args,
+ dependencies=dependencies, mode='preprocess', disable_cache=disable_cache)
+
+
+class ElbrusFortranCompiler(GnuFortranCompiler, ElbrusCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ GnuFortranCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, defines=defines,
+ linker=linker, full_version=full_version)
+ ElbrusCompiler.__init__(self)
+
+class G95FortranCompiler(FortranCompiler):
+
+ LINKER_PREFIX = '-Wl,'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ FortranCompiler.__init__(self, exelist, version, for_machine,
+ is_cross, info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ self.id = 'g95'
+ default_warn_args = ['-Wall']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra', '-pedantic']}
+
+ def get_module_outdir_args(self, path: str) -> T.List[str]:
+ return ['-fmod=' + path]
+
+ def get_no_warn_args(self) -> T.List[str]:
+ # FIXME: Confirm that there's no compiler option to disable all warnings
+ return []
+
+
+class SunFortranCompiler(FortranCompiler):
+
+ LINKER_PREFIX = '-Wl,'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ FortranCompiler.__init__(self, exelist, version, for_machine,
+ is_cross, info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ self.id = 'sun'
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ return ['-fpp']
+
+ def get_always_args(self) -> T.List[str]:
+ return []
+
+ def get_warn_args(self, level: str) -> T.List[str]:
+ return []
+
+ def get_module_incdir_args(self) -> T.Tuple[str, ...]:
+ return ('-M', )
+
+ def get_module_outdir_args(self, path: str) -> T.List[str]:
+ return ['-moddir=' + path]
+
+ def openmp_flags(self) -> T.List[str]:
+ return ['-xopenmp']
+
+
+class IntelFortranCompiler(IntelGnuLikeCompiler, FortranCompiler):
+
+ file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp', )
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ FortranCompiler.__init__(self, exelist, version, for_machine,
+ is_cross, info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ # FIXME: Add support for OS X and Windows in detect_fortran_compiler so
+ # we are sent the type of compiler
+ IntelGnuLikeCompiler.__init__(self)
+ self.id = 'intel'
+ default_warn_args = ['-warn', 'general', '-warn', 'truncated_source']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-warn', 'unused'],
+ '3': ['-warn', 'all']}
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = FortranCompiler.get_options(self)
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018']
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'}
+ if std.value != 'none':
+ args.append('-stand=' + stds[std.value])
+ return args
+
+ def get_preprocess_only_args(self) -> T.List[str]:
+ return ['-cpp', '-EP']
+
+ def language_stdlib_only_link_flags(self) -> T.List[str]:
+ return ['-lifcore', '-limf']
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ return ['-gen-dep=' + outtarget, '-gen-depformat=make']
+
+
+class IntelClFortranCompiler(IntelVisualStudioLikeCompiler, FortranCompiler):
+
+ file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp', )
+ always_args = ['/nologo']
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo', target: str,
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ FortranCompiler.__init__(self, exelist, version, for_machine,
+ is_cross, info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ IntelVisualStudioLikeCompiler.__init__(self, target)
+
+ default_warn_args = ['/warn:general', '/warn:truncated_source']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['/warn:unused'],
+ '3': ['/warn:all']}
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ opts = FortranCompiler.get_options(self)
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ opts[key].choices = ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018']
+ return opts
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'}
+ if std.value != 'none':
+ args.append('/stand:' + stds[std.value])
+ return args
+
+ def get_module_outdir_args(self, path: str) -> T.List[str]:
+ return ['/module:' + path]
+
+
+class PathScaleFortranCompiler(FortranCompiler):
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ FortranCompiler.__init__(self, exelist, version, for_machine,
+ is_cross, info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ self.id = 'pathscale'
+ default_warn_args = ['-fullwarn']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args,
+ '3': default_warn_args}
+
+ def openmp_flags(self) -> T.List[str]:
+ return ['-mp']
+
+
+class PGIFortranCompiler(PGICompiler, FortranCompiler):
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ FortranCompiler.__init__(self, exelist, version, for_machine,
+ is_cross, info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ PGICompiler.__init__(self)
+
+ default_warn_args = ['-Minform=inform']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args,
+ '3': default_warn_args + ['-Mdclchk']}
+
+ def language_stdlib_only_link_flags(self) -> T.List[str]:
+ return ['-lpgf90rtl', '-lpgf90', '-lpgf90_rpm1', '-lpgf902',
+ '-lpgf90rtl', '-lpgftnrtl', '-lrt']
+
+
+class NvidiaHPC_FortranCompiler(PGICompiler, FortranCompiler):
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ FortranCompiler.__init__(self, exelist, version, for_machine,
+ is_cross, info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ PGICompiler.__init__(self)
+
+ self.id = 'nvidia_hpc'
+ default_warn_args = ['-Minform=inform']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args,
+ '3': default_warn_args + ['-Mdclchk']}
+
+
+class FlangFortranCompiler(ClangCompiler, FortranCompiler):
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ FortranCompiler.__init__(self, exelist, version, for_machine,
+ is_cross, info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ ClangCompiler.__init__(self, {})
+ self.id = 'flang'
+ default_warn_args = ['-Minform=inform']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args,
+ '3': default_warn_args}
+
+ def language_stdlib_only_link_flags(self) -> T.List[str]:
+ return ['-lflang', '-lpgmath']
+
+
+class Open64FortranCompiler(FortranCompiler):
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ FortranCompiler.__init__(self, exelist, version, for_machine,
+ is_cross, info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ self.id = 'open64'
+ default_warn_args = ['-fullwarn']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args,
+ '3': default_warn_args}
+
+ def openmp_flags(self) -> T.List[str]:
+ return ['-mp']
+
+
+class NAGFortranCompiler(FortranCompiler):
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool,
+ info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ FortranCompiler.__init__(self, exelist, version, for_machine,
+ is_cross, info, exe_wrapper, linker=linker,
+ full_version=full_version)
+ self.id = 'nagfor'
+
+ def get_warn_args(self, level: str) -> T.List[str]:
+ return []
+
+ def get_module_outdir_args(self, path: str) -> T.List[str]:
+ return ['-mdir', path]
+
+ def openmp_flags(self) -> T.List[str]:
+ return ['-openmp']
diff --git a/meson/mesonbuild/compilers/java.py b/meson/mesonbuild/compilers/java.py
new file mode 100644
index 000000000..ab8245057
--- /dev/null
+++ b/meson/mesonbuild/compilers/java.py
@@ -0,0 +1,104 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+import shutil
+import subprocess
+import textwrap
+import typing as T
+
+from ..mesonlib import EnvironmentException, MachineChoice
+from .compilers import Compiler, java_buildtype_args
+from .mixins.islinker import BasicLinkerIsCompilerMixin
+
+if T.TYPE_CHECKING:
+ from ..envconfig import MachineInfo
+ from ..environment import Environment
+
+class JavaCompiler(BasicLinkerIsCompilerMixin, Compiler):
+
+ language = 'java'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ info: 'MachineInfo', full_version: T.Optional[str] = None):
+ super().__init__(exelist, version, for_machine, info, full_version=full_version)
+ self.id = 'unknown'
+ self.javarunner = 'java'
+
+ def get_werror_args(self) -> T.List[str]:
+ return ['-Werror']
+
+ def get_output_args(self, subdir: str) -> T.List[str]:
+ if subdir == '':
+ subdir = './'
+ return ['-d', subdir, '-s', subdir]
+
+ def get_pic_args(self) -> T.List[str]:
+ return []
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ return []
+
+ def get_pch_name(self, name: str) -> str:
+ return ''
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return java_buildtype_args[buildtype]
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+ build_dir: str) -> T.List[str]:
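+ # For illustration (hypothetical values), on a POSIX host with
+ # build_dir='/build', ['-cp', 'libs:../ext'] becomes
+ # ['-cp', '/build/libs:/ext']: each classpath entry is joined onto
+ # build_dir and then normalized.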
+ for idx, i in enumerate(parameter_list):
+ if i in ['-cp', '-classpath', '-sourcepath'] and idx + 1 < len(parameter_list):
+ path_list = parameter_list[idx + 1].split(os.pathsep)
+ path_list = [os.path.normpath(os.path.join(build_dir, x)) for x in path_list]
+ parameter_list[idx + 1] = os.pathsep.join(path_list)
+
+ return parameter_list
+
+ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ src = 'SanityCheck.java'
+ obj = 'SanityCheck'
+ source_name = os.path.join(work_dir, src)
+ with open(source_name, 'w', encoding='utf-8') as ofile:
+ ofile.write(textwrap.dedent(
+ '''class SanityCheck {
+ public static void main(String[] args) {
+ int i;
+ }
+ }
+ '''))
+ pc = subprocess.Popen(self.exelist + [src], cwd=work_dir)
+ pc.wait()
+ if pc.returncode != 0:
+ raise EnvironmentException('Java compiler %s can not compile programs.' % self.name_string())
+ runner = shutil.which(self.javarunner)
+ if runner:
+ cmdlist = [runner, obj]
+ pe = subprocess.Popen(cmdlist, cwd=work_dir)
+ pe.wait()
+ if pe.returncode != 0:
+ raise EnvironmentException('Executables created by Java compiler %s are not runnable.' % self.name_string())
+ else:
+ m = "Java Virtual Machine wasn't found, but it's needed by Meson. " \
+ "Please install a JRE.\nIf you have specific needs where this " \
+ "requirement doesn't make sense, please open a bug at " \
+ "https://github.com/mesonbuild/meson/issues/new and tell us " \
+ "all about it."
+ raise EnvironmentException(m)
+
+ def needs_static_linker(self) -> bool:
+ return False
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return []
diff --git a/meson/mesonbuild/compilers/mixins/__init__.py b/meson/mesonbuild/compilers/mixins/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/__init__.py
diff --git a/meson/mesonbuild/compilers/mixins/arm.py b/meson/mesonbuild/compilers/mixins/arm.py
new file mode 100644
index 000000000..4e1898ae7
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/arm.py
@@ -0,0 +1,190 @@
+# Copyright 2012-2020 Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Representations specific to the arm family of compilers."""
+
+import os
+import typing as T
+
+from ... import mesonlib
+from ...linkers import ArmClangDynamicLinker
+from ...mesonlib import OptionKey
+from ..compilers import clike_debug_args
+from .clang import clang_color_args
+
+if T.TYPE_CHECKING:
+ from ...environment import Environment
+ from ...compilers.compilers import Compiler
+else:
+ # This is a bit clever, for mypy we pretend that these mixins descend from
+ # Compiler, so we get all of the methods and attributes defined for us, but
+ # for runtime we make them descend from object (which all classes normally
+ # do). This gives us DRYer type checking, with no runtime impact
+ Compiler = object
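+ # A minimal sketch of the pattern above (illustrative only; the names
+ # below are hypothetical and not used by this module):
+ #
+ # if T.TYPE_CHECKING:
+ #     _Base = Compiler
+ # else:
+ #     _Base = object
+ #
+ # class SomeMixin(_Base):
+ #     def describe(self) -> str:
+ #         return self.id  # mypy checks this against Compiler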
+
+arm_buildtype_args = {
+ 'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+} # type: T.Dict[str, T.List[str]]
+
+arm_optimization_args = {
+ '0': ['-O0'],
+ 'g': ['-g'],
+ '1': ['-O1'],
+ '2': [], # Compiler defaults to -O2
+ '3': ['-O3', '-Otime'],
+ 's': ['-O3'], # Compiler defaults to -Ospace
+} # type: T.Dict[str, T.List[str]]
+
+armclang_buildtype_args = {
+ 'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+} # type: T.Dict[str, T.List[str]]
+
+armclang_optimization_args = {
+ '0': [], # Compiler defaults to -O0
+ 'g': ['-g'],
+ '1': ['-O1'],
+ '2': ['-O2'],
+ '3': ['-O3'],
+ 's': ['-Oz']
+} # type: T.Dict[str, T.List[str]]
+
+
+class ArmCompiler(Compiler):
+
+ """Functionality that is common to all ARM family compilers."""
+
+ def __init__(self) -> None:
+ if not self.is_cross:
+ raise mesonlib.EnvironmentException('armcc supports only cross-compilation.')
+ self.id = 'arm'
+ default_warn_args = [] # type: T.List[str]
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + [],
+ '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]
+ # Assembly
+ self.can_compile_suffixes.add('s')
+
+ def get_pic_args(self) -> T.List[str]:
+ # FIXME: Add /ropi, /rwpi, /fpic etc. qualifiers to --apcs
+ return []
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return arm_buildtype_args[buildtype]
+
+ # Override CCompiler.get_always_args
+ def get_always_args(self) -> T.List[str]:
+ return []
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ return ['--depend_target', outtarget, '--depend', outfile, '--depend_single_line']
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ # FIXME: Add required arguments
+ # NOTE from armcc user guide:
+ # "Support for Precompiled Header (PCH) files is deprecated from ARM Compiler 5.05
+ # onwards on all platforms. Note that ARM Compiler on Windows 8 never supported
+ # PCH files."
+ return []
+
+ def get_pch_suffix(self) -> str:
+ # NOTE from armcc user guide:
+ # "Support for Precompiled Header (PCH) files is deprecated from ARM Compiler 5.05
+ # onwards on all platforms. Note that ARM Compiler on Windows 8 never supported
+ # PCH files."
+ return 'pch'
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ return []
+
+ def get_coverage_args(self) -> T.List[str]:
+ return []
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return arm_optimization_args[optimization_level]
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return clike_debug_args[is_debug]
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
+
+
+class ArmclangCompiler(Compiler):
+
+ def __init__(self) -> None:
+ if not self.is_cross:
+ raise mesonlib.EnvironmentException('armclang supports only cross-compilation.')
+ # Check whether 'armlink' is available in path
+ if not isinstance(self.linker, ArmClangDynamicLinker):
+ raise mesonlib.EnvironmentException(f'Unsupported Linker {self.linker.exelist}, must be armlink')
+ if not mesonlib.version_compare(self.version, '==' + self.linker.version):
+ raise mesonlib.EnvironmentException('armlink version does not match with compiler version')
+ self.id = 'armclang'
+ self.base_options = {
+ OptionKey(o) for o in
+ ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage',
+ 'b_ndebug', 'b_staticpic', 'b_colorout']}
+ # Assembly
+ self.can_compile_suffixes.add('s')
+
+ def get_pic_args(self) -> T.List[str]:
+ # PIC support is not enabled by default for ARM,
+ # if users want to use it, they need to add the required arguments explicitly
+ return []
+
+ def get_colorout_args(self, colortype: str) -> T.List[str]:
+ return clang_color_args[colortype][:]
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return armclang_buildtype_args[buildtype]
+
+ def get_pch_suffix(self) -> str:
+ return 'gch'
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ # Workaround for Clang bug http://llvm.org/bugs/show_bug.cgi?id=15136
+ # This flag is internal to Clang (or at least not documented on the man page)
+ # so it might change semantics at any time.
+ return ['-include-pch', os.path.join(pch_dir, self.get_pch_name(header))]
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ return ['-MD', '-MT', outtarget, '-MF', outfile]
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return armclang_optimization_args[optimization_level]
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return clike_debug_args[is_debug]
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
diff --git a/meson/mesonbuild/compilers/mixins/c2000.py b/meson/mesonbuild/compilers/mixins/c2000.py
new file mode 100644
index 000000000..287aaa89e
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/c2000.py
@@ -0,0 +1,124 @@
+# Copyright 2012-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Representations specific to the Texas Instruments C2000 compiler family."""
+
+import os
+import typing as T
+
+from ...mesonlib import EnvironmentException
+
+if T.TYPE_CHECKING:
+ from ...environment import Environment
+ from ...compilers.compilers import Compiler
+else:
+ # This is a bit clever, for mypy we pretend that these mixins descend from
+ # Compiler, so we get all of the methods and attributes defined for us, but
+ # for runtime we make them descend from object (which all classes normally
+ # do). This gives us DRYer type checking, with no runtime impact
+ Compiler = object
+
+c2000_buildtype_args = {
+ 'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+} # type: T.Dict[str, T.List[str]]
+
+c2000_optimization_args = {
+ '0': ['-O0'],
+ 'g': ['-Ooff'],
+ '1': ['-O1'],
+ '2': ['-O2'],
+ '3': ['-O3'],
+ 's': ['-O4']
+} # type: T.Dict[str, T.List[str]]
+
+c2000_debug_args = {
+ False: [],
+ True: []
+} # type: T.Dict[bool, T.List[str]]
+
+
+class C2000Compiler(Compiler):
+
+ def __init__(self) -> None:
+ if not self.is_cross:
+ raise EnvironmentException('c2000 supports only cross-compilation.')
+ self.id = 'c2000'
+ # Assembly
+ self.can_compile_suffixes.add('asm')
+ default_warn_args = [] # type: T.List[str]
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + [],
+ '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]
+
+ def get_pic_args(self) -> T.List[str]:
+ # PIC support is not enabled by default for c2000,
+ # if users want to use it, they need to add the required arguments explicitly
+ return []
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return c2000_buildtype_args[buildtype]
+
+ def get_pch_suffix(self) -> str:
+ return 'pch'
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ return []
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ return []
+
+ def get_coverage_args(self) -> T.List[str]:
+ return []
+
+ def get_no_stdinc_args(self) -> T.List[str]:
+ return []
+
+ def get_no_stdlib_link_args(self) -> T.List[str]:
+ return []
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return c2000_optimization_args[optimization_level]
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return c2000_debug_args[is_debug]
+
+ @classmethod
+ def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
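+ # A hypothetical example of the mapping below:
+ # ['-DFOO=1', '-Iinclude', '-L/usr/lib', '-Wl,-rpath=/usr/lib']
+ # becomes ['-define=FOO=1', '-include=include']; -L, rpath and
+ # search-dir flags have no C2000 equivalent and are dropped.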
+ result = []
+ for i in args:
+ if i.startswith('-D'):
+ i = '-define=' + i[2:]
+ if i.startswith('-I'):
+ i = '-include=' + i[2:]
+ if i.startswith('-Wl,-rpath='):
+ continue
+ elif i == '--print-search-dirs':
+ continue
+ elif i.startswith('-L'):
+ continue
+ result.append(i)
+ return result
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:9] == '-include=':
+ parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
+
+ return parameter_list
diff --git a/meson/mesonbuild/compilers/mixins/ccrx.py b/meson/mesonbuild/compilers/mixins/ccrx.py
new file mode 100644
index 000000000..eba4c455f
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/ccrx.py
@@ -0,0 +1,130 @@
+# Copyright 2012-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Representations specific to the Renesas CC-RX compiler family."""
+
+import os
+import typing as T
+
+from ...mesonlib import EnvironmentException
+
+if T.TYPE_CHECKING:
+ from ...environment import Environment
+ from ...compilers.compilers import Compiler
+else:
+ # This is a bit clever, for mypy we pretend that these mixins descend from
+ # Compiler, so we get all of the methods and attributes defined for us, but
+ # for runtime we make them descend from object (which all classes normally
+ # do). This gives us DRYer type checking, with no runtime impact
+ Compiler = object
+
+ccrx_buildtype_args = {
+ 'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+} # type: T.Dict[str, T.List[str]]
+
+ccrx_optimization_args = {
+ '0': ['-optimize=0'],
+ 'g': ['-optimize=0'],
+ '1': ['-optimize=1'],
+ '2': ['-optimize=2'],
+ '3': ['-optimize=max'],
+ 's': ['-optimize=2', '-size']
+} # type: T.Dict[str, T.List[str]]
+
+ccrx_debug_args = {
+ False: [],
+ True: ['-debug']
+} # type: T.Dict[bool, T.List[str]]
+
+
+class CcrxCompiler(Compiler):
+
+ if T.TYPE_CHECKING:
+ is_cross = True
+ can_compile_suffixes = set() # type: T.Set[str]
+
+ def __init__(self) -> None:
+ if not self.is_cross:
+ raise EnvironmentException('ccrx supports only cross-compilation.')
+ self.id = 'ccrx'
+ # Assembly
+ self.can_compile_suffixes.add('src')
+ default_warn_args = [] # type: T.List[str]
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + [],
+ '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]
+
+ def get_pic_args(self) -> T.List[str]:
+ # PIC support is not enabled by default for CCRX,
+ # if users want to use it, they need to add the required arguments explicitly
+ return []
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return ccrx_buildtype_args[buildtype]
+
+ def get_pch_suffix(self) -> str:
+ return 'pch'
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ return []
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ return []
+
+ def get_coverage_args(self) -> T.List[str]:
+ return []
+
+ def get_no_stdinc_args(self) -> T.List[str]:
+ return []
+
+ def get_no_stdlib_link_args(self) -> T.List[str]:
+ return []
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return ccrx_optimization_args[optimization_level]
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return ccrx_debug_args[is_debug]
+
+ @classmethod
+ def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
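+ # Hypothetical example: in addition to the -D/-I translation, static
+ # archives given as bare paths are rewritten for the CC-RX driver,
+ # e.g. ['-DBAR', 'libfoo.a'] becomes ['-define=BAR', '-lib=libfoo.a'].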
+ result = []
+ for i in args:
+ if i.startswith('-D'):
+ i = '-define=' + i[2:]
+ if i.startswith('-I'):
+ i = '-include=' + i[2:]
+ if i.startswith('-Wl,-rpath='):
+ continue
+ elif i == '--print-search-dirs':
+ continue
+ elif i.startswith('-L'):
+ continue
+ elif not i.startswith('-lib=') and i.endswith(('.a', '.lib')):
+ i = '-lib=' + i
+ result.append(i)
+ return result
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:9] == '-include=':
+ parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
+
+ return parameter_list
diff --git a/meson/mesonbuild/compilers/mixins/clang.py b/meson/mesonbuild/compilers/mixins/clang.py
new file mode 100644
index 000000000..f7e94928c
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/clang.py
@@ -0,0 +1,162 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstractions for the LLVM/Clang compiler family."""
+
+import os
+import shutil
+import typing as T
+
+from ... import mesonlib
+from ...linkers import AppleDynamicLinker, ClangClDynamicLinker, LLVMDynamicLinker, GnuGoldDynamicLinker
+from ...mesonlib import OptionKey
+from ..compilers import CompileCheckMode
+from .gnu import GnuLikeCompiler
+
+if T.TYPE_CHECKING:
+ from ...environment import Environment
+ from ...dependencies import Dependency # noqa: F401
+
+clang_color_args = {
+ 'auto': ['-fcolor-diagnostics'],
+ 'always': ['-fcolor-diagnostics'],
+ 'never': ['-fno-color-diagnostics'],
+} # type: T.Dict[str, T.List[str]]
+
+clang_optimization_args = {
+ '0': ['-O0'],
+ 'g': ['-Og'],
+ '1': ['-O1'],
+ '2': ['-O2'],
+ '3': ['-O3'],
+ 's': ['-Os'],
+} # type: T.Dict[str, T.List[str]]
+
+class ClangCompiler(GnuLikeCompiler):
+
+ def __init__(self, defines: T.Optional[T.Dict[str, str]]):
+ super().__init__()
+ self.id = 'clang'
+ self.defines = defines or {}
+ self.base_options.update(
+ {OptionKey('b_colorout'), OptionKey('b_lto_threads'), OptionKey('b_lto_mode')})
+
+ # TODO: this really should be part of the linker base_options, but
+ # linkers don't have base_options.
+ if isinstance(self.linker, AppleDynamicLinker):
+ self.base_options.add(OptionKey('b_bitcode'))
+ # All Clang backends can also do LLVM IR
+ self.can_compile_suffixes.add('ll')
+
+ def get_colorout_args(self, colortype: str) -> T.List[str]:
+ return clang_color_args[colortype][:]
+
+ def has_builtin_define(self, define: str) -> bool:
+ return define in self.defines
+
+ def get_builtin_define(self, define: str) -> T.Optional[str]:
+ return self.defines.get(define)
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return clang_optimization_args[optimization_level]
+
+ def get_pch_suffix(self) -> str:
+ return 'pch'
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ # Workaround for Clang bug http://llvm.org/bugs/show_bug.cgi?id=15136
+ # This flag is internal to Clang (or at least not documented on the man page)
+ # so it might change semantics at any time.
+ return ['-include-pch', os.path.join(pch_dir, self.get_pch_name(header))]
+
+ def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+ # Clang is different from GCC: it will return True when a symbol isn't
+ # defined in a header. Specifically, this seems to have something to do
+ # with functions that may be in a header on some systems, but not all of
+ # them. `strlcat` specifically can trigger this.
+ myargs: T.List[str] = ['-Werror=implicit-function-declaration']
+ if mode is CompileCheckMode.COMPILE:
+ myargs.extend(['-Werror=unknown-warning-option', '-Werror=unused-command-line-argument'])
+ if mesonlib.version_compare(self.version, '>=3.6.0'):
+ myargs.append('-Werror=ignored-optimization-argument')
+ return super().get_compiler_check_args(mode) + myargs
+
+ def has_function(self, funcname: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ if extra_args is None:
+ extra_args = []
+ # Starting with XCode 8, we need to pass this to force linker
+ # visibility to obey OS X/iOS/tvOS minimum version targets with
+ # -mmacosx-version-min, -miphoneos-version-min, -mtvos-version-min etc.
+ # https://github.com/Homebrew/homebrew-core/issues/3727
+ # TODO: this really should be communicated by the linker
+ if isinstance(self.linker, AppleDynamicLinker) and mesonlib.version_compare(self.version, '>=8.0'):
+ extra_args.append('-Wl,-no_weak_imports')
+ return super().has_function(funcname, prefix, env, extra_args=extra_args,
+ dependencies=dependencies)
+
+ def openmp_flags(self) -> T.List[str]:
+ if mesonlib.version_compare(self.version, '>=3.8.0'):
+ return ['-fopenmp']
+ elif mesonlib.version_compare(self.version, '>=3.7.0'):
+ return ['-fopenmp=libomp']
+ else:
+ # Shouldn't work, but it'll be checked explicitly in the OpenMP dependency.
+ return []
+
+ @classmethod
+ def use_linker_args(cls, linker: str) -> T.List[str]:
+ # Clang additionally can use a linker specified as a path, which GCC
+ # (and other gcc-like compilers) cannot. This is because clang (being
+ # LLVM based) is retargetable, while GCC is not.
+
+ # qcld: Qualcomm Snapdragon linker, based on LLVM
+ if linker == 'qcld':
+ return ['-fuse-ld=qcld']
+
+ if shutil.which(linker):
+ return [f'-fuse-ld={linker}']
+ return super().use_linker_args(linker)
+
+ def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+ # Clang only warns about unknown or ignored attributes, so force an
+ # error.
+ return ['-Werror=attributes']
+
+ def get_coverage_link_args(self) -> T.List[str]:
+ return ['--coverage']
+
+ def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
+ args: T.List[str] = []
+ if mode == 'thin':
+ # Thin LTO requires the use of gold, lld, ld64, or lld-link
+ if not isinstance(self.linker, (AppleDynamicLinker, ClangClDynamicLinker, LLVMDynamicLinker, GnuGoldDynamicLinker)):
+ raise mesonlib.MesonException(f"LLVM's thinLTO only works with gnu gold, lld, lld-link, and ld64, not {self.linker.id}")
+ args.append(f'-flto={mode}')
+ else:
+ assert mode == 'default', 'someone forgot to wire something up'
+ args.extend(super().get_lto_compile_args(threads=threads))
+ return args
+
+ def get_lto_link_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
+ args = self.get_lto_compile_args(threads=threads, mode=mode)
+ # In clang -flto-jobs=0 means auto, which is already the default, so
+ # don't add the flag when threads is zero
+ if threads > 0:
+ args.append(f'-flto-jobs={threads}')
+ return args
diff --git a/meson/mesonbuild/compilers/mixins/clike.py b/meson/mesonbuild/compilers/mixins/clike.py
new file mode 100644
index 000000000..09ad837b1
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/clike.py
@@ -0,0 +1,1267 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Mixin classes to be shared between C and C++ compilers.
+
+Without this we'll end up with awful diamond inheritance problems. The goal
+of this is to have mixins: classes that are designed *not* to be used
+standalone; they only work through inheritance.
+"""
+
+import collections
+import functools
+import glob
+import itertools
+import os
+import re
+import subprocess
+import typing as T
+from pathlib import Path
+
+from ... import arglist
+from ... import mesonlib
+from ... import mlog
+from ...linkers import GnuLikeDynamicLinkerMixin, SolarisDynamicLinker, CompCertDynamicLinker
+from ...mesonlib import LibType
+from ...coredata import OptionKey
+from .. import compilers
+from ..compilers import CompileCheckMode
+from .visualstudio import VisualStudioLikeCompiler
+
+if T.TYPE_CHECKING:
+ from ...dependencies import Dependency
+ from ..._typing import ImmutableListProtocol
+ from ...environment import Environment
+ from ...compilers.compilers import Compiler
+ from ...programs import ExternalProgram
+else:
+ # This is a bit clever, for mypy we pretend that these mixins descend from
+ # Compiler, so we get all of the methods and attributes defined for us, but
+ # for runtime we make them descend from object (which all classes normally
+ # do). This gives us DRYer type checking, with no runtime impact
+ Compiler = object
+
+GROUP_FLAGS = re.compile(r'''\.so (?:\.[0-9]+)? (?:\.[0-9]+)? (?:\.[0-9]+)?$ |
+ ^(?:-Wl,)?-l |
+ \.a$''', re.X)
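+# Examples of arguments the verbose regex above treats as libraries
+# (hypothetical values): 'libz.so', 'libz.so.1.2.11', '-lz', '-Wl,-lz'
+# and 'libz.a'; a plain object file such as 'main.o' does not match.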
+
+class CLikeCompilerArgs(arglist.CompilerArgs):
+ prepend_prefixes = ('-I', '-L')
+ dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U')
+
+ # NOTE: not thorough. A list of potential corner cases can be found in
+ # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
+ dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic')
+ dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a')
+ dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread')
+
+ def to_native(self, copy: bool = False) -> T.List[str]:
+ # This seems to be allowed, but could never work?
+ assert isinstance(self.compiler, compilers.Compiler), 'How did you get here'
+
+ # Check if we need to add --start/end-group for circular dependencies
+ # between static libraries, and for recursively searching for symbols
+ # needed by static libraries that are provided by object files or
+ # shared libraries.
+ self.flush_pre_post()
+ if copy:
+ new = self.copy()
+ else:
+ new = self
+ # This covers ld.bfd, ld.gold, and xild on Linux, which all act like
+ # (or are) GNU ld
+ # TODO: this could probably be added to the DynamicLinker instead
+ if isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker, CompCertDynamicLinker)):
+ group_start = -1
+ group_end = -1
+ for i, each in enumerate(new):
+ if not GROUP_FLAGS.search(each):
+ continue
+ group_end = i
+ if group_start < 0:
+ # First occurrence of a library
+ group_start = i
+ if group_start >= 0:
+ # Last occurrence of a library
+ new.insert(group_end + 1, '-Wl,--end-group')
+ new.insert(group_start, '-Wl,--start-group')
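+ # For instance (hypothetical link line), ['main.o', 'liba.a', '-lb', 'x.o']
+ # becomes ['main.o', '-Wl,--start-group', 'liba.a', '-lb',
+ # '-Wl,--end-group', 'x.o'], wrapping the first..last library mentions.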
+ # Remove system/default include paths added with -isystem
+ default_dirs = self.compiler.get_default_include_dirs()
+ if default_dirs:
+ bad_idx_list = [] # type: T.List[int]
+ for i, each in enumerate(new):
+ if not each.startswith('-isystem'):
+ continue
+
+ # Remove the -isystem and the path if the path is a default path
+ if (each == '-isystem' and
+ i < (len(new) - 1) and
+ new[i + 1] in default_dirs):
+ bad_idx_list += [i, i + 1]
+ elif each.startswith('-isystem=') and each[9:] in default_dirs:
+ bad_idx_list += [i]
+ elif each[8:] in default_dirs:
+ bad_idx_list += [i]
+ for i in reversed(bad_idx_list):
+ new.pop(i)
+ return self.compiler.unix_args_to_native(new._container)
+
+ def __repr__(self) -> str:
+ self.flush_pre_post()
+ return f'CLikeCompilerArgs({self.compiler!r}, {self._container!r})'
+
+
+class CLikeCompiler(Compiler):
+
+ """Shared bits for the C and CPP Compilers."""
+
+ if T.TYPE_CHECKING:
+ warn_args = {} # type: T.Dict[str, T.List[str]]
+
+ # TODO: Replace this manual cache with functools.lru_cache
+ find_library_cache = {} # type: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], str, LibType], T.Optional[T.List[str]]]
+ find_framework_cache = {} # type: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], bool], T.Optional[T.List[str]]]
+ internal_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS
+
+ def __init__(self, exe_wrapper: T.Optional['ExternalProgram'] = None):
+ # If a child ObjC or CPP class has already set it, don't set it ourselves
+ self.can_compile_suffixes.add('h')
+ # If the exe wrapper was not found, pretend it wasn't set so that the
+ # sanity check is skipped and compiler checks use fallbacks.
+ if not exe_wrapper or not exe_wrapper.found() or not exe_wrapper.get_command():
+ self.exe_wrapper = None
+ else:
+ self.exe_wrapper = exe_wrapper
+
+ def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CLikeCompilerArgs:
+ # This is correct, mypy just doesn't understand co-operative inheritance
+ return CLikeCompilerArgs(self, args)
+
+ def needs_static_linker(self) -> bool:
+ return True # When compiling static libraries, so yes.
+
+ def get_always_args(self) -> T.List[str]:
+ '''
+ Args that are always-on for all C compilers other than MSVC
+ '''
+ return self.get_largefile_args()
+
+ def get_no_stdinc_args(self) -> T.List[str]:
+ return ['-nostdinc']
+
+ def get_no_stdlib_link_args(self) -> T.List[str]:
+ return ['-nostdlib']
+
+ def get_warn_args(self, level: str) -> T.List[str]:
+ # TODO: this should be an enum
+ return self.warn_args[level]
+
+ def get_no_warn_args(self) -> T.List[str]:
+ # Almost every compiler uses this for disabling warnings
+ return ['-w']
+
+ def get_depfile_suffix(self) -> str:
+ return 'd'
+
+ def get_exelist(self) -> T.List[str]:
+ return self.exelist.copy()
+
+ def get_preprocess_only_args(self) -> T.List[str]:
+ return ['-E', '-P']
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return ['-c']
+
+ def get_no_optimization_args(self) -> T.List[str]:
+ return ['-O0']
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return ['-o', target]
+
+ def get_werror_args(self) -> T.List[str]:
+ return ['-Werror']
+
+ def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+ if path == '':
+ path = '.'
+ if is_system:
+ return ['-isystem', path]
+ return ['-I' + path]
+
+ def get_compiler_dirs(self, env: 'Environment', name: str) -> T.List[str]:
+ '''
+ Get dirs from the compiler, either `libraries:` or `programs:`
+ '''
+ return []
+
+ @functools.lru_cache()
+ def _get_library_dirs(self, env: 'Environment',
+ elf_class: T.Optional[int] = None) -> 'ImmutableListProtocol[str]':
+ # TODO: replace elf_class with enum
+ dirs = self.get_compiler_dirs(env, 'libraries')
+ if elf_class is None or elf_class == 0:
+ return dirs
+
+ # if we do have an elf class for 32-bit or 64-bit, we want to check that
+ # the directory in question contains libraries of the appropriate class. Since
+ # system directories aren't mixed, we only need to check one file for each
+ # directory and go by that. If we can't check the file for some reason, assume
+ # the compiler knows what it's doing, and accept the directory anyway.
+ retval = []
+ for d in dirs:
+ files = [f for f in os.listdir(d) if f.endswith('.so') and os.path.isfile(os.path.join(d, f))]
+ # if no files, accept directory and move on
+ if not files:
+ retval.append(d)
+ continue
+
+ for f in files:
+ file_to_check = os.path.join(d, f)
+ try:
+ with open(file_to_check, 'rb') as fd:
+ header = fd.read(5)
+ # if file is not an ELF file, it's weird, but accept dir
+ # if it is elf, and the class matches, accept dir
+ if header[1:4] != b'ELF' or int(header[4]) == elf_class:
+ retval.append(d)
+ # at this point, it's an ELF file which doesn't match the
+ # appropriate elf_class, so skip this one
+ # stop scanning after the first successful read
+ break
+ except OSError:
+ # Skip the file if we can't read it
+ pass
+
+ return retval
+
+ def get_library_dirs(self, env: 'Environment',
+ elf_class: T.Optional[int] = None) -> T.List[str]:
+ """Wrap the lru_cache so that we return a new copy and don't allow
+ mutation of the cached value.
+ """
+ return self._get_library_dirs(env, elf_class).copy()
+
+ @functools.lru_cache()
+ def _get_program_dirs(self, env: 'Environment') -> 'ImmutableListProtocol[str]':
+ '''
+ Programs used by the compiler. Also where toolchain DLLs such as
+ libstdc++-6.dll are found with MinGW.
+ '''
+ return self.get_compiler_dirs(env, 'programs')
+
+ def get_program_dirs(self, env: 'Environment') -> T.List[str]:
+ return self._get_program_dirs(env).copy()
+
+ def get_pic_args(self) -> T.List[str]:
+ return ['-fPIC']
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ return ['-include', os.path.basename(header)]
+
+ def get_pch_name(self, header_name: str) -> str:
+ return os.path.basename(header_name) + '.' + self.get_pch_suffix()
+
+ def get_default_include_dirs(self) -> T.List[str]:
+ return []
+
+ def gen_export_dynamic_link_args(self, env: 'Environment') -> T.List[str]:
+ return self.linker.export_dynamic_args(env)
+
+ def gen_import_library_args(self, implibname: str) -> T.List[str]:
+ return self.linker.import_library_args(implibname)
+
+ def _sanity_check_impl(self, work_dir: str, environment: 'Environment',
+ sname: str, code: str) -> None:
+ mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', ' '.join(self.exelist))
+ mlog.debug(f'Is cross compiler: {self.is_cross!s}.')
+
+ source_name = os.path.join(work_dir, sname)
+ binname = sname.rsplit('.', 1)[0]
+ mode = CompileCheckMode.LINK
+ if self.is_cross:
+ binname += '_cross'
+ if self.exe_wrapper is None:
+ # Linking cross built apps is painful. You can't really
+ # tell if you should use -nostdlib or not and for example
+ # on OSX the compiler binary is the same but you need
+ # a ton of compiler flags to differentiate between
+ # arm and x86_64. So just compile.
+ mode = CompileCheckMode.COMPILE
+ cargs, largs = self._get_basic_compiler_args(environment, mode)
+ extra_flags = cargs + self.linker_to_compiler_args(largs)
+
+ # '.exe' is a valid executable suffix on all toolchains and platforms
+ binname += '.exe'
+ # Write binary check source
+ binary_name = os.path.join(work_dir, binname)
+ with open(source_name, 'w', encoding='utf-8') as ofile:
+ ofile.write(code)
+ # Compile sanity check
+ # NOTE: extra_flags must be added at the end. On MSVC, it might contain a '/link' argument
+ # after which all further arguments will be passed directly to the linker
+ cmdlist = self.exelist + [sname] + self.get_output_args(binname) + extra_flags
+ pc, stdo, stde = mesonlib.Popen_safe(cmdlist, cwd=work_dir)
+ mlog.debug('Sanity check compiler command line:', ' '.join(cmdlist))
+ mlog.debug('Sanity check compile stdout:')
+ mlog.debug(stdo)
+ mlog.debug('-----\nSanity check compile stderr:')
+ mlog.debug(stde)
+ mlog.debug('-----')
+ if pc.returncode != 0:
+ raise mesonlib.EnvironmentException(f'Compiler {self.name_string()} can not compile programs.')
+ # Run sanity check
+ if self.is_cross:
+ if self.exe_wrapper is None:
+ # Can't check if the binaries run so we have to assume they do
+ return
+ cmdlist = self.exe_wrapper.get_command() + [binary_name]
+ else:
+ cmdlist = [binary_name]
+ mlog.debug('Running test binary command: ' + ' '.join(cmdlist))
+ try:
+ pe = subprocess.Popen(cmdlist)
+ except Exception as e:
+ raise mesonlib.EnvironmentException(f'Could not invoke sanity test executable: {e!s}.')
+ pe.wait()
+ if pe.returncode != 0:
+ raise mesonlib.EnvironmentException(f'Executables created by {self.language} compiler {self.name_string()} are not runnable.')
+
+ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ code = 'int main(void) { int class=0; return class; }\n'
+ return self._sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code)
+
+ def check_header(self, hname: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ code = f'''{prefix}
+ #include <{hname}>'''
+ return self.compiles(code, env, extra_args=extra_args,
+ dependencies=dependencies)
+
+ def has_header(self, hname: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None,
+ disable_cache: bool = False) -> T.Tuple[bool, bool]:
+ code = f'''{prefix}
+ #ifdef __has_include
+ #if !__has_include("{hname}")
+ #error "Header '{hname}' could not be found"
+ #endif
+ #else
+ #include <{hname}>
+ #endif'''
+ return self.compiles(code, env, extra_args=extra_args,
+ dependencies=dependencies, mode='preprocess', disable_cache=disable_cache)
+
+ def has_header_symbol(self, hname: str, symbol: str, prefix: str,
+ env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ t = f'''{prefix}
+ #include <{hname}>
+ int main(void) {{
+ /* If it's not defined as a macro, try to use as a symbol */
+ #ifndef {symbol}
+ {symbol};
+ #endif
+ return 0;
+ }}'''
+ return self.compiles(t, env, extra_args=extra_args,
+ dependencies=dependencies)
+
+ def _get_basic_compiler_args(self, env: 'Environment', mode: CompileCheckMode) -> T.Tuple[T.List[str], T.List[str]]:
+ cargs = [] # type: T.List[str]
+ largs = [] # type: T.List[str]
+ if mode is CompileCheckMode.LINK:
+ # Sometimes we need to manually select the CRT to use with MSVC.
+ # One example is when trying to do a compiler check that involves
+ # linking with static libraries since MSVC won't select a CRT for
+ # us in that case and will error out asking us to pick one.
+ try:
+ crt_val = env.coredata.options[OptionKey('b_vscrt')].value
+ buildtype = env.coredata.options[OptionKey('buildtype')].value
+ cargs += self.get_crt_compile_args(crt_val, buildtype)
+ except (KeyError, AttributeError):
+ pass
+
+ # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS and CPPFLAGS from the env
+ sys_args = env.coredata.get_external_args(self.for_machine, self.language)
+ if isinstance(sys_args, str):
+ sys_args = [sys_args]
+ # Apparently it is a thing to inject linker flags both
+ # via CFLAGS _and_ LDFLAGS, even though the former are
+ # also used during linking. These flags can break
+ # argument checks. Thanks, Autotools.
+ cleaned_sys_args = self.remove_linkerlike_args(sys_args)
+ cargs += cleaned_sys_args
+
+ if mode is CompileCheckMode.LINK:
+ ld_value = env.lookup_binary_entry(self.for_machine, self.language + '_ld')
+ if ld_value is not None:
+ largs += self.use_linker_args(ld_value[0])
+
+ # Add LDFLAGS from the env
+ sys_ld_args = env.coredata.get_external_link_args(self.for_machine, self.language)
+ # CFLAGS and CXXFLAGS go to both linking and compiling, but we want them
+ # to only appear on the command line once. Remove dupes.
+ largs += [x for x in sys_ld_args if x not in sys_args]
+
+ cargs += self.get_compiler_args_for_mode(mode)
+ return cargs, largs
+
+ def build_wrapper_args(self, env: 'Environment',
+ extra_args: T.Union[None, arglist.CompilerArgs, T.List[str]],
+ dependencies: T.Optional[T.List['Dependency']],
+ mode: CompileCheckMode = CompileCheckMode.COMPILE) -> arglist.CompilerArgs:
+ # TODO: the caller should handle the listifying of these arguments
+ if extra_args is None:
+ extra_args = []
+ else:
+ # TODO: we want to do this in the caller
+ extra_args = mesonlib.listify(extra_args)
+ extra_args = mesonlib.listify([e(mode.value) if callable(e) else e for e in extra_args])
+
+ if dependencies is None:
+ dependencies = []
+ elif not isinstance(dependencies, collections.abc.Iterable):
+ # TODO: we want to ensure the front end does the listifying here
+ dependencies = [dependencies] # type: ignore
+ # Collect compiler arguments
+ cargs = self.compiler_args() # type: arglist.CompilerArgs
+ largs = [] # type: T.List[str]
+ for d in dependencies:
+ # Add compile flags needed by dependencies
+ cargs += d.get_compile_args()
+ if mode is CompileCheckMode.LINK:
+ # Add link flags needed to find dependencies
+ largs += d.get_link_args()
+
+ ca, la = self._get_basic_compiler_args(env, mode)
+ cargs += ca
+ largs += la
+
+ cargs += self.get_compiler_check_args(mode)
+
+ # on MSVC compiler and linker flags must be separated by the "/link" argument
+ # at this point, the '/link' argument may already be part of extra_args, otherwise, it is added here
+ if self.linker_to_compiler_args([]) == ['/link'] and largs and '/link' not in extra_args:
+ extra_args += ['/link']
+
+ args = cargs + extra_args + largs
+ return args
+
+ def run(self, code: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> compilers.RunResult:
+ need_exe_wrapper = env.need_exe_wrapper(self.for_machine)
+ if need_exe_wrapper and self.exe_wrapper is None:
+ raise compilers.CrossNoRunException('Can not run test applications in this cross environment.')
+ with self._build_wrapper(code, env, extra_args, dependencies, mode='link', want_output=True) as p:
+ if p.returncode != 0:
+ mlog.debug(f'Could not compile test file {p.input_name}: {p.returncode}\n')
+ return compilers.RunResult(False)
+ if need_exe_wrapper:
+ cmdlist = self.exe_wrapper.get_command() + [p.output_name]
+ else:
+ cmdlist = [p.output_name]
+ try:
+ pe, so, se = mesonlib.Popen_safe(cmdlist)
+ except Exception as e:
+ mlog.debug(f'Could not run: {cmdlist} (error: {e})\n')
+ return compilers.RunResult(False)
+
+ mlog.debug('Program stdout:\n')
+ mlog.debug(so)
+ mlog.debug('Program stderr:\n')
+ mlog.debug(se)
+ return compilers.RunResult(True, pe.returncode, so, se)
+
+ def _compile_int(self, expression: str, prefix: str, env: 'Environment',
+ extra_args: T.Optional[T.List[str]],
+ dependencies: T.Optional[T.List['Dependency']]) -> bool:
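+ # The template below abuses C's constant-expression rules: the array
+ # gets size 1 when `expression` is true and size -1 (a compile error)
+ # when it is false, so compile success answers the question without
+ # running anything.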
+ t = f'''#include <stdio.h>
+ {prefix}
+ int main(void) {{ static int a[1-2*!({expression})]; a[0]=0; return 0; }}'''
+ return self.compiles(t, env, extra_args=extra_args,
+ dependencies=dependencies)[0]
+
+ def cross_compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int],
+ guess: T.Optional[int], prefix: str, env: 'Environment',
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> int:
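+ # Sketch of the search, assuming the (hypothetical) value is 5 and no
+ # bounds are given: `expression >= 0` compiles, the probe bound grows
+ # through 0, 1, 3, 7 until `expression > 7` fails to compile, and the
+ # binary search below narrows [4, 7] down to 5.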
+ # Try user's guess first
+ if isinstance(guess, int):
+ if self._compile_int(f'{expression} == {guess}', prefix, env, extra_args, dependencies):
+ return guess
+
+ # If no bounds are given, compute them in the limit of int32
+ maxint = 0x7fffffff
+ minint = -0x80000000
+ if not isinstance(low, int) or not isinstance(high, int):
+ if self._compile_int(f'{expression} >= 0', prefix, env, extra_args, dependencies):
+ low = cur = 0
+ while self._compile_int(f'{expression} > {cur}', prefix, env, extra_args, dependencies):
+ low = cur + 1
+ if low > maxint:
+ raise mesonlib.EnvironmentException('Cross-compile check overflowed')
+ cur = cur * 2 + 1
+ if cur > maxint:
+ cur = maxint
+ high = cur
+ else:
+ high = cur = -1
+ while self._compile_int(f'{expression} < {cur}', prefix, env, extra_args, dependencies):
+ high = cur - 1
+ if high < minint:
+ raise mesonlib.EnvironmentException('Cross-compile check overflowed')
+ cur = cur * 2
+ if cur < minint:
+ cur = minint
+ low = cur
+ else:
+ # Sanity check limits given by user
+ if high < low:
+ raise mesonlib.EnvironmentException('high limit smaller than low limit')
+ condition = f'{expression} <= {high} && {expression} >= {low}'
+ if not self._compile_int(condition, prefix, env, extra_args, dependencies):
+ raise mesonlib.EnvironmentException('Value out of given range')
+
+ # Binary search
+ while low != high:
+ cur = low + int((high - low) / 2)
+ if self._compile_int(f'{expression} <= {cur}', prefix, env, extra_args, dependencies):
+ high = cur
+ else:
+ low = cur + 1
+
+ return low
+
+ def compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int],
+ guess: T.Optional[int], prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+ if extra_args is None:
+ extra_args = []
+ if self.is_cross:
+ return self.cross_compute_int(expression, low, high, guess, prefix, env, extra_args, dependencies)
+ t = f'''#include<stdio.h>
+ {prefix}
+ int main(void) {{
+ printf("%ld\\n", (long)({expression}));
+ return 0;
+ }};'''
+ res = self.run(t, env, extra_args=extra_args,
+ dependencies=dependencies)
+ if not res.compiled:
+ return -1
+ if res.returncode != 0:
+ raise mesonlib.EnvironmentException('Could not run compute_int test binary.')
+ return int(res.stdout)
+
+ def cross_sizeof(self, typename: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+ if extra_args is None:
+ extra_args = []
+ t = f'''#include <stdio.h>
+ {prefix}
+ int main(void) {{
+ {typename} something;
+ return 0;
+ }}'''
+ if not self.compiles(t, env, extra_args=extra_args,
+ dependencies=dependencies)[0]:
+ return -1
+ return self.cross_compute_int(f'sizeof({typename})', None, None, None, prefix, env, extra_args, dependencies)
+
+ def sizeof(self, typename: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+ if extra_args is None:
+ extra_args = []
+ if self.is_cross:
+ return self.cross_sizeof(typename, prefix, env, extra_args=extra_args,
+ dependencies=dependencies)
+ t = f'''#include<stdio.h>
+ {prefix}
+ int main(void) {{
+ printf("%ld\\n", (long)(sizeof({typename})));
+ return 0;
+ }};'''
+ res = self.run(t, env, extra_args=extra_args,
+ dependencies=dependencies)
+ if not res.compiled:
+ return -1
+ if res.returncode != 0:
+ raise mesonlib.EnvironmentException('Could not run sizeof test binary.')
+ return int(res.stdout)
+
+ def cross_alignment(self, typename: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+ if extra_args is None:
+ extra_args = []
+ t = f'''#include <stdio.h>
+ {prefix}
+ int main(void) {{
+ {typename} something;
+ return 0;
+ }}'''
+ if not self.compiles(t, env, extra_args=extra_args,
+ dependencies=dependencies)[0]:
+ return -1
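+ # Classic alignment probe: in a struct of a lone char followed by the
+ # type, the compiler pads after the char up to the type's alignment,
+ # so offsetof(struct tmp, target) equals the alignment of typename.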
+ t = f'''#include <stddef.h>
+ {prefix}
+ struct tmp {{
+ char c;
+ {typename} target;
+ }};'''
+ return self.cross_compute_int('offsetof(struct tmp, target)', None, None, None, t, env, extra_args, dependencies)
+
+ def alignment(self, typename: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+ if extra_args is None:
+ extra_args = []
+ if self.is_cross:
+ return self.cross_alignment(typename, prefix, env, extra_args=extra_args,
+ dependencies=dependencies)
+ t = f'''#include <stdio.h>
+ #include <stddef.h>
+ {prefix}
+ struct tmp {{
+ char c;
+ {typename} target;
+ }};
+ int main(void) {{
+ printf("%d", (int)offsetof(struct tmp, target));
+ return 0;
+ }}'''
+ res = self.run(t, env, extra_args=extra_args,
+ dependencies=dependencies)
+ if not res.compiled:
+ raise mesonlib.EnvironmentException('Could not compile alignment test.')
+ if res.returncode != 0:
+ raise mesonlib.EnvironmentException('Could not run alignment test binary.')
+ align = int(res.stdout)
+ if align == 0:
+ raise mesonlib.EnvironmentException(f'Could not determine alignment of {typename}. Sorry. You might want to file a bug.')
+ return align
+
+ def get_define(self, dname: str, prefix: str, env: 'Environment',
+ extra_args: T.Optional[T.List[str]],
+ dependencies: T.Optional[T.List['Dependency']],
+ disable_cache: bool = False) -> T.Tuple[str, bool]:
+ delim = '"MESON_GET_DEFINE_DELIMITER"'
+ code = f'''
+ {prefix}
+ #ifndef {dname}
+ # define {dname}
+ #endif
+ {delim}\n{dname}'''
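+ # E.g. (hypothetical) dname='FOO' with prefix '#define FOO 42': the
+ # preprocessed output ends with the delimiter line followed by '42',
+ # and the split below recovers '42' as the define's value.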
+ args = self.build_wrapper_args(env, extra_args, dependencies,
+ mode=CompileCheckMode.PREPROCESS).to_native()
+ func = functools.partial(self.cached_compile, code, env.coredata, extra_args=args, mode='preprocess')
+ if disable_cache:
+ func = functools.partial(self.compile, code, extra_args=args, mode='preprocess', temp_dir=env.scratch_dir)
+ with func() as p:
+ cached = p.cached
+ if p.returncode != 0:
+ raise mesonlib.EnvironmentException(f'Could not get define {dname!r}')
+ # Get the preprocessed value after the delimiter,
+ # minus the extra newline at the end and
+ # merge string literals.
+ return self._concatenate_string_literals(p.stdout.split(delim + '\n')[-1][:-1]), cached
+
+ def get_return_value(self, fname: str, rtype: str, prefix: str,
+ env: 'Environment', extra_args: T.Optional[T.List[str]],
+ dependencies: T.Optional[T.List['Dependency']]) -> T.Union[str, int]:
+ # TODO: rtype should be an enum.
+ # TODO: maybe we can use overload to tell mypy when this will return int vs str?
+ if rtype == 'string':
+ fmt = '%s'
+ cast = '(char*)'
+ elif rtype == 'int':
+ fmt = '%lli'
+ cast = '(long long int)'
+ else:
+ raise AssertionError(f'BUG: Unknown return type {rtype!r}')
+ code = f'''{prefix}
+ #include <stdio.h>
+ int main(void) {{
+ printf ("{fmt}", {cast} {fname}());
+ return 0;
+ }}'''
+ res = self.run(code, env, extra_args=extra_args, dependencies=dependencies)
+ if not res.compiled:
+ raise mesonlib.EnvironmentException(f'Could not get return value of {fname}()')
+ if rtype == 'string':
+ return res.stdout
+ elif rtype == 'int':
+ try:
+ return int(res.stdout.strip())
+ except ValueError:
+ raise mesonlib.EnvironmentException(f'Return value of {fname}() is not an int')
+ assert False, 'Unreachable'
+
+ @staticmethod
+ def _no_prototype_templ() -> T.Tuple[str, str]:
+ """
+ Try to find the function without a prototype from a header by defining
+ our own dummy prototype and trying to link with the C library (and
+ whatever else the compiler links in by default). This is very similar
+ to the check performed by Autoconf for AC_CHECK_FUNCS.
+ """
+ # Define the symbol to something else since it is defined by the
+ # includes or defines listed by the user or by the compiler. This may
+ # include, for instance _GNU_SOURCE which must be defined before
+ # limits.h, which includes features.h
+ # Then, undef the symbol to get rid of it completely.
+ head = '''
+ #define {func} meson_disable_define_of_{func}
+ {prefix}
+ #include <limits.h>
+ #undef {func}
+ '''
+ # Override any GCC internal prototype and declare our own definition for
+ # the symbol. Use char because that's unlikely to be an actual return
+ # value for a function which ensures that we override the definition.
+ head += '''
+ #ifdef __cplusplus
+ extern "C"
+ #endif
+ char {func} (void);
+ '''
+ # The actual function call
+ main = '''
+ int main(void) {{
+ return {func} ();
+ }}'''
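+ # Rendered for a hypothetical func='frob', the probe boils down to:
+ # #define frob meson_disable_define_of_frob
+ # ...user prefix, limits.h...
+ # #undef frob
+ # char frob (void);
+ # int main(void) { return frob (); }
+ # so linking succeeds only if some linked library actually provides frob.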
+ return head, main
+
+ @staticmethod
+ def _have_prototype_templ() -> T.Tuple[str, str]:
+ """
+ Returns a header and main() call that uses the headers listed by the
+ user for the function prototype while checking if a function exists.
+ """
+ # Add the 'prefix', aka defines, includes, etc that the user provides
+ # This may include, for instance _GNU_SOURCE which must be defined
+ # before limits.h, which includes features.h
+ head = '{prefix}\n#include <limits.h>\n'
+ # We don't know what the function takes or returns, so return it as an int.
+ # Just taking the address or comparing it to void is not enough because
+ # compilers are smart enough to optimize it away. The resulting binary
+ # is not run so we don't care what the return value is.
+ main = '''\nint main(void) {{
+ void *a = (void*) &{func};
+ long long b = (long long) a;
+ return (int) b;
+ }}'''
+ return head, main
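+
+ # With func='zlibVersion' and prefix='#include <zlib.h>' (illustrative),
+ # the formatted check essentially becomes:
+ #
+ # #include <zlib.h>
+ # #include <limits.h>
+ # int main(void) {
+ # void *a = (void*) &zlibVersion;
+ # long long b = (long long) a;
+ # return (int) b;
+ # }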
+
+ def has_function(self, funcname: str, prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ """Determine if a function exists.
+
+ First, this function looks for the symbol in the default libraries
+ provided by the compiler (stdlib + a few others usually). If that
+ fails, it checks if any of the headers specified in the prefix provide
+ an implementation of the function, and if that fails, it checks if it's
+ implemented as a compiler-builtin.
+ """
+ if extra_args is None:
+ extra_args = []
+
+ # Short-circuit if the check is already provided by the cross-info file
+ varname = 'has function ' + funcname
+ varname = varname.replace(' ', '_')
+ if self.is_cross:
+ val = env.properties.host.get(varname, None)
+ if val is not None:
+ if isinstance(val, bool):
+ return val, False
+ raise mesonlib.EnvironmentException(f'Cross variable {varname} is not a boolean.')
+
+ # TODO: we really need a protocol for this,
+ #
+ # class StrProto(typing.Protocol):
+ # def __str__(self) -> str: ...
+ fargs = {'prefix': prefix, 'func': funcname} # type: T.Dict[str, T.Union[str, bool, int]]
+
+ # glibc defines functions that are not available on Linux as stubs that
+ # fail with ENOSYS (such as e.g. lchmod). In this case we want to fail
+ # instead of detecting the stub as a valid symbol.
+ # We already included limits.h earlier to ensure that these are defined
+ # for stub functions.
+ stubs_fail = '''
+ #if defined __stub_{func} || defined __stub___{func}
+ fail fail fail this function is not going to work
+ #endif
+ '''
+
+ # If we have any includes in the prefix supplied by the user, assume
+ # that the user wants us to use the symbol prototype defined in those
+ # includes. If not, then try to do the Autoconf-style check with
+ # a dummy prototype definition of our own.
+ # This is needed when the linker determines symbol availability from an
+ # SDK based on the prototype in the header provided by the SDK.
+ # Ignoring this prototype would result in the symbol always being
+ # marked as available.
+ if '#include' in prefix:
+ head, main = self._have_prototype_templ()
+ else:
+ head, main = self._no_prototype_templ()
+ templ = head + stubs_fail + main
+
+ res, cached = self.links(templ.format(**fargs), env, extra_args=extra_args,
+ dependencies=dependencies)
+ if res:
+ return True, cached
+
+ # MSVC and ICL do not have compiler __builtin_ functions.
+ if self.get_id() in {'msvc', 'intel-cl'}:
+ return False, False
+
+ # Detect function as a built-in
+ #
+ # Some functions like alloca() are defined as compiler built-ins which
+ # are inlined by the compiler and you can't take their address, so we
+ # need to look for them differently. On nice compilers like clang, we
+ # can just directly use the __has_builtin() macro.
+ fargs['no_includes'] = '#include' not in prefix
+ is_builtin = funcname.startswith('__builtin_')
+ fargs['is_builtin'] = is_builtin
+ fargs['__builtin_'] = '' if is_builtin else '__builtin_'
+ t = '''{prefix}
+ int main(void) {{
+
+ /* With some toolchains (MSYS2/mingw for example) the compiler
+ * provides various builtins which are not really implemented and
+ * fall back to the stdlib where they aren't provided and fail at
+ * build/link time. In case the user provides a header, including
+ * the header didn't lead to the function being defined, and the
+ * function we are checking isn't a builtin itself we assume the
+ * builtin is not functional and we just error out. */
+ #if !{no_includes:d} && !defined({func}) && !{is_builtin:d}
+ #error "No definition for {__builtin_}{func} found in the prefix"
+ #endif
+
+ #ifdef __has_builtin
+ #if !__has_builtin({__builtin_}{func})
+ #error "{__builtin_}{func} not found"
+ #endif
+ #elif ! defined({func})
+ {__builtin_}{func};
+ #endif
+ return 0;
+ }}'''
+ return self.links(t.format(**fargs), env, extra_args=extra_args,
+ dependencies=dependencies)
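+
+ # Typical call (illustrative): check a libc function against the headers
+ # supplied in the prefix.
+ #
+ # found, cached = cc.has_function('fcntl', '#include <fcntl.h>', env)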
+
+ def has_members(self, typename: str, membernames: T.List[str],
+ prefix: str, env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ if extra_args is None:
+ extra_args = []
+ # Create code that accesses all members
+ members = ''
+ for member in membernames:
+ members += f'foo.{member};\n'
+ t = f'''{prefix}
+ void bar(void) {{
+ {typename} foo;
+ {members}
+ }};'''
+ return self.compiles(t, env, extra_args=extra_args,
+ dependencies=dependencies)
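+
+ # e.g. (illustrative) checking that `struct stat` has an `st_mtim` member:
+ #
+ # cc.has_members('struct stat', ['st_mtim'], '#include <sys/stat.h>', env)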
+
+ def has_type(self, typename: str, prefix: str, env: 'Environment', extra_args: T.List[str],
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ t = f'''{prefix}
+ void bar(void) {{
+ sizeof({typename});
+ }};'''
+ return self.compiles(t, env, extra_args=extra_args,
+ dependencies=dependencies)
+
+ def symbols_have_underscore_prefix(self, env: 'Environment') -> bool:
+ '''
+ Check if the compiler prefixes an underscore to global C symbols
+ '''
+ symbol_name = b'meson_uscore_prefix'
+ code = '''#ifdef __cplusplus
+ extern "C" {
+ #endif
+ void ''' + symbol_name.decode() + ''' (void) {}
+ #ifdef __cplusplus
+ }
+ #endif
+ '''
+ args = self.get_compiler_check_args(CompileCheckMode.COMPILE)
+ n = 'symbols_have_underscore_prefix'
+ with self._build_wrapper(code, env, extra_args=args, mode='compile', want_output=True, temp_dir=env.scratch_dir) as p:
+ if p.returncode != 0:
+ raise RuntimeError(f'BUG: Unable to compile {n!r} check: {p.stdout}')
+ if not os.path.isfile(p.output_name):
+ raise RuntimeError(f'BUG: Can\'t find compiled test code for {n!r} check')
+ with open(p.output_name, 'rb') as o:
+ for line in o:
+ # Check if the underscore form of the symbol is somewhere
+ # in the output file.
+ if b'_' + symbol_name in line:
+ mlog.debug("Symbols have underscore prefix: YES")
+ return True
+ # Else, check if the non-underscored form is present
+ elif symbol_name in line:
+ mlog.debug("Symbols have underscore prefix: NO")
+ return False
+ raise RuntimeError(f'BUG: {n!r} check failed unexpectedly')
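+
+ # On e.g. 32-bit Windows and Darwin targets, C symbols get a leading
+ # underscore, so the object file above contains `_meson_uscore_prefix`;
+ # on most ELF platforms it contains plain `meson_uscore_prefix`.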
+
+ def _get_patterns(self, env: 'Environment', prefixes: T.List[str], suffixes: T.List[str], shared: bool = False) -> T.List[str]:
+ patterns = [] # type: T.List[str]
+ for p in prefixes:
+ for s in suffixes:
+ patterns.append(p + '{}.' + s)
+ if shared and env.machines[self.for_machine].is_openbsd():
+ # Shared libraries on OpenBSD can be named libfoo.so.X.Y:
+ # https://www.openbsd.org/faq/ports/specialtopics.html#SharedLibs
+ #
+ # This globbing is probably the best matching we can do since regex
+ # is expensive. It's wrong in many edge cases, but it will match
+ # correctly-named libraries and hopefully no one on OpenBSD names
+ # their files libfoo.so.9a.7b.1.0
+ for p in prefixes:
+ patterns.append(p + '{}.so.[0-9]*.[0-9]*')
+ return patterns
+
+ def get_library_naming(self, env: 'Environment', libtype: LibType, strict: bool = False) -> T.Tuple[str, ...]:
+ '''
+ Get library prefixes and suffixes for the target platform ordered by
+ priority
+ '''
+ stlibext = ['a']
+ # We've always allowed libname to be both `foo` and `libfoo`, and now
+ # people depend on it. Also, some people use prebuilt `foo.so` instead
+ # of `libfoo.so` for unknown reasons, and may also want to create
+ # `foo.so` by setting name_prefix to ''
+ if strict and not isinstance(self, VisualStudioLikeCompiler): # lib prefix is not usually used with msvc
+ prefixes = ['lib']
+ else:
+ prefixes = ['lib', '']
+ # Library suffixes and prefixes
+ if env.machines[self.for_machine].is_darwin():
+ shlibext = ['dylib', 'so']
+ elif env.machines[self.for_machine].is_windows():
+ # FIXME: .lib files can be import or static so we should read the
+ # file, figure out which one it is, and reject the wrong kind.
+ if isinstance(self, VisualStudioLikeCompiler):
+ shlibext = ['lib']
+ else:
+ shlibext = ['dll.a', 'lib', 'dll']
+ # Yep, static libraries can also be foo.lib
+ stlibext += ['lib']
+ elif env.machines[self.for_machine].is_cygwin():
+ shlibext = ['dll', 'dll.a']
+ prefixes = ['cyg'] + prefixes
+ else:
+ # Linux/BSDs
+ shlibext = ['so']
+ # Search priority
+ if libtype is LibType.PREFER_SHARED:
+ patterns = self._get_patterns(env, prefixes, shlibext, True)
+ patterns.extend([x for x in self._get_patterns(env, prefixes, stlibext, False) if x not in patterns])
+ elif libtype is LibType.PREFER_STATIC:
+ patterns = self._get_patterns(env, prefixes, stlibext, False)
+ patterns.extend([x for x in self._get_patterns(env, prefixes, shlibext, True) if x not in patterns])
+ elif libtype is LibType.SHARED:
+ patterns = self._get_patterns(env, prefixes, shlibext, True)
+ else:
+ assert libtype is LibType.STATIC
+ patterns = self._get_patterns(env, prefixes, stlibext, False)
+ return tuple(patterns)
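+
+ # For instance, on a Linux host with LibType.PREFER_SHARED this returns
+ # (illustrative): ('lib{}.so', '{}.so', 'lib{}.a', '{}.a')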
+
+ @staticmethod
+ def _sort_shlibs_openbsd(libs: T.List[str]) -> T.List[str]:
+ filtered = [] # type: T.List[str]
+ for lib in libs:
+ # Validate file as a shared library of type libfoo.so.X.Y
+ ret = lib.rsplit('.so.', maxsplit=1)
+ if len(ret) != 2:
+ continue
+ try:
+ float(ret[1])
+ except ValueError:
+ continue
+ filtered.append(lib)
+ float_cmp = lambda x: float(x.rsplit('.so.', maxsplit=1)[1])
+ return sorted(filtered, key=float_cmp, reverse=True)
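+
+ # e.g. ['libfoo.so', 'libfoo.so.7.0', 'libfoo.so.8.2'] sorts to
+ # ['libfoo.so.8.2', 'libfoo.so.7.0']; the unversioned name has no
+ # '.so.X.Y' part and is filtered out.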
+
+ @classmethod
+ def _get_trials_from_pattern(cls, pattern: str, directory: str, libname: str) -> T.List[Path]:
+ f = Path(directory) / pattern.format(libname)
+ # Globbing for OpenBSD
+ if '*' in pattern:
+ # NOTE: globbing matches directories and broken symlinks
+ # so we have to do an isfile test on it later
+ return [Path(x) for x in cls._sort_shlibs_openbsd(glob.glob(str(f)))]
+ return [f]
+
+ @staticmethod
+ def _get_file_from_list(env: 'Environment', paths: T.List[Path]) -> T.Optional[Path]:
+ '''
+ We just check whether the library exists. We can't do a link check
+ because the library might have unresolved symbols that require other
+ libraries. On macOS we check if the library matches our target
+ architecture.
+ '''
+ # If not building on macOS for Darwin, do a simple file check
+ if not env.machines.host.is_darwin() or not env.machines.build.is_darwin():
+ for p in paths:
+ if p.is_file():
+ return p
+ # Run `lipo` and check if the library supports the arch we want
+ for p in paths:
+ if not p.is_file():
+ continue
+ archs = mesonlib.darwin_get_object_archs(str(p))
+ if archs and env.machines.host.cpu_family in archs:
+ return p
+ else:
+ mlog.debug(f'Rejected {p}, supports {archs} but need {env.machines.host.cpu_family}')
+ return None
+
+ @functools.lru_cache()
+ def output_is_64bit(self, env: 'Environment') -> bool:
+ '''
+ Returns True if the output produced is 64-bit, False if 32-bit.
+ '''
+ return self.sizeof('void *', '', env) == 8
+
+ def _find_library_real(self, libname: str, env: 'Environment', extra_dirs: T.List[str], code: str, libtype: LibType) -> T.Optional[T.List[str]]:
+ # First try if we can just add the library as -l.
+ # Gcc + co seem to prefer builtin lib dirs to -L dirs.
+ # Only try to find std libs if no extra dirs specified.
+ # The built-in search procedure will always favour .so and then always
+ # search for .a. This is only allowed if libtype is LibType.PREFER_SHARED
+ if ((not extra_dirs and libtype is LibType.PREFER_SHARED) or
+ libname in self.internal_libs):
+ cargs = ['-l' + libname]
+ largs = self.get_linker_always_args() + self.get_allow_undefined_link_args()
+ extra_args = cargs + self.linker_to_compiler_args(largs)
+
+ if self.links(code, env, extra_args=extra_args, disable_cache=True)[0]:
+ return cargs
+ # Don't do a manual search for internal libs
+ if libname in self.internal_libs:
+ return None
+ # Not found or we want to use a specific libtype? Try to find the
+ # library file itself.
+ patterns = self.get_library_naming(env, libtype)
+ # try to detect if we are 64-bit or 32-bit. If we can't
+ # detect, we will just skip path validity checks done in
+ # get_library_dirs() call
+ try:
+ if self.output_is_64bit(env):
+ elf_class = 2
+ else:
+ elf_class = 1
+ except (mesonlib.MesonException, KeyError): # TODO evaluate if catching KeyError is wanted here
+ elf_class = 0
+ # Search in the specified dirs, and then in the system libraries
+ for d in itertools.chain(extra_dirs, self.get_library_dirs(env, elf_class)):
+ for p in patterns:
+ trials = self._get_trials_from_pattern(p, d, libname)
+ if not trials:
+ continue
+ trial = self._get_file_from_list(env, trials)
+ if not trial:
+ continue
+ return [trial.as_posix()]
+ return None
+
+ def _find_library_impl(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
+ code: str, libtype: LibType) -> T.Optional[T.List[str]]:
+ # These libraries are either built-in or invalid
+ if libname in self.ignore_libs:
+ return []
+ if isinstance(extra_dirs, str):
+ extra_dirs = [extra_dirs]
+ key = (tuple(self.exelist), libname, tuple(extra_dirs), code, libtype)
+ if key not in self.find_library_cache:
+ value = self._find_library_real(libname, env, extra_dirs, code, libtype)
+ self.find_library_cache[key] = value
+ else:
+ value = self.find_library_cache[key]
+ if value is None:
+ return None
+ return value.copy()
+
+ def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
+ libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]:
+ code = 'int main(void) { return 0; }\n'
+ return self._find_library_impl(libname, env, extra_dirs, code, libtype)
+
+ def find_framework_paths(self, env: 'Environment') -> T.List[str]:
+ '''
+ These are usually /Library/Frameworks and /System/Library/Frameworks,
+ unless you select a particular macOS SDK with the -isysroot flag.
+ You can also add to this by setting -F in CFLAGS.
+ '''
+ # TODO: this really needs to be *AppleClang*, not just any clang.
+ if self.id != 'clang':
+ raise mesonlib.MesonException('Cannot find framework path with non-clang compiler')
+ # Construct the compiler command-line
+ commands = self.get_exelist() + ['-v', '-E', '-']
+ commands += self.get_always_args()
+ # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS from the env
+ commands += env.coredata.get_external_args(self.for_machine, self.language)
+ mlog.debug('Finding framework path by running: ', ' '.join(commands), '\n')
+ os_env = os.environ.copy()
+ os_env['LC_ALL'] = 'C'
+ _, _, stde = mesonlib.Popen_safe(commands, env=os_env, stdin=subprocess.PIPE)
+ paths = [] # type: T.List[str]
+ for line in stde.split('\n'):
+ if '(framework directory)' not in line:
+ continue
+ # line is of the form:
+ # ` /path/to/framework (framework directory)`
+ paths.append(line[:-21].strip())
+ return paths
+
+ def _find_framework_real(self, name: str, env: 'Environment', extra_dirs: T.List[str], allow_system: bool) -> T.Optional[T.List[str]]:
+ code = 'int main(void) { return 0; }'
+ link_args = []
+ for d in extra_dirs:
+ link_args += ['-F' + d]
+ # We can pass -Z to disable searching in the system frameworks, but
+ # then we must also pass -L/usr/lib to pick up libSystem.dylib
+ extra_args = [] if allow_system else ['-Z', '-L/usr/lib']
+ link_args += ['-framework', name]
+ if self.links(code, env, extra_args=(extra_args + link_args), disable_cache=True)[0]:
+ return link_args
+ return None
+
+ def _find_framework_impl(self, name: str, env: 'Environment', extra_dirs: T.List[str],
+ allow_system: bool) -> T.Optional[T.List[str]]:
+ if isinstance(extra_dirs, str):
+ extra_dirs = [extra_dirs]
+ key = (tuple(self.exelist), name, tuple(extra_dirs), allow_system)
+ if key in self.find_framework_cache:
+ value = self.find_framework_cache[key]
+ else:
+ value = self._find_framework_real(name, env, extra_dirs, allow_system)
+ self.find_framework_cache[key] = value
+ if value is None:
+ return None
+ return value.copy()
+
+ def find_framework(self, name: str, env: 'Environment', extra_dirs: T.List[str],
+ allow_system: bool = True) -> T.Optional[T.List[str]]:
+ '''
+ Finds the framework with the specified name and returns the link args
+ for it, or None when the framework is not found.
+ '''
+ # TODO: maybe this belongs in clang? also, should probably check for macOS?
+ if self.id != 'clang':
+ raise mesonlib.MesonException('Cannot find frameworks with non-clang compiler')
+ return self._find_framework_impl(name, env, extra_dirs, allow_system)
+
+ def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ # TODO: does this belong here or in GnuLike or maybe PosixLike?
+ return []
+
+ def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ # TODO: does this belong here or in GnuLike or maybe PosixLike?
+ return []
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ # TODO: does this belong here or in GnuLike or maybe PosixLike?
+ host_m = env.machines[self.for_machine]
+ if host_m.is_haiku() or host_m.is_darwin():
+ return []
+ return ['-pthread']
+
+ def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]:
+ return args.copy()
+
+ def has_arguments(self, args: T.List[str], env: 'Environment', code: str,
+ mode: str) -> T.Tuple[bool, bool]:
+ return self.compiles(code, env, extra_args=args, mode=mode)
+
+ def _has_multi_arguments(self, args: T.List[str], env: 'Environment', code: str) -> T.Tuple[bool, bool]:
+ new_args = [] # type: T.List[str]
+ for arg in args:
+ # some compilers, e.g. GCC, don't warn for unsupported warning-disable
+ # flags, so when we are testing a flag like "-Wno-forgotten-towel", also
+ # check the equivalent enable flag too "-Wforgotten-towel"
+ if arg.startswith('-Wno-'):
+ new_args.append('-W' + arg[5:])
+ if arg.startswith('-Wl,'):
+ mlog.warning(f'{arg} looks like a linker argument, '
+ 'but has_argument and other similar methods only '
+ 'support checking compiler arguments. Using them '
+ 'to check linker arguments is never supported, '
+ 'and results are likely to be wrong regardless of '
+ 'the compiler you are using. has_link_argument or '
+ 'another similar method can be used instead.')
+ new_args.append(arg)
+ return self.has_arguments(new_args, env, code, mode='compile')
+
+ def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+ return self._has_multi_arguments(args, env, 'extern int i;\nint i;\n')
+
+ def _has_multi_link_arguments(self, args: T.List[str], env: 'Environment', code: str) -> T.Tuple[bool, bool]:
+ # First time we check for link flags we need to first check if we have
+ # --fatal-warnings, otherwise some linker checks could give some
+ # false positive.
+ args = self.linker.fatal_warnings() + args
+ args = self.linker_to_compiler_args(args)
+ return self.has_arguments(args, env, code, mode='link')
+
+ def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+ return self._has_multi_link_arguments(args, env, 'int main(void) { return 0; }\n')
+
+ @staticmethod
+ def _concatenate_string_literals(s: str) -> str:
+ pattern = re.compile(r'(?P<pre>.*([^\\]")|^")(?P<str1>([^\\"]|\\.)*)"\s+"(?P<str2>([^\\"]|\\.)*)(?P<post>".*)')
+ ret = s
+ m = pattern.match(ret)
+ while m:
+ ret = ''.join(m.group('pre', 'str1', 'str2', 'post'))
+ m = pattern.match(ret)
+ return ret
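+
+ # e.g. '"foo" "bar"' -> '"foobar"', mirroring how a C compiler would
+ # merge the adjacent literals in the preprocessed define value.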
+
+ def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+ # Most compilers (such as GCC and Clang) only warn about unknown or
+ # ignored attributes, so force an error. Overridden in GCC and Clang
+ # mixins.
+ return ['-Werror']
+
+ def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]:
+ # Just assume that if we're not on windows that dllimport and dllexport
+ # don't work
+ m = env.machines[self.for_machine]
+ if not (m.is_windows() or m.is_cygwin()):
+ if name in ['dllimport', 'dllexport']:
+ return False, False
+
+ return self.compiles(self.attribute_check_func(name), env,
+ extra_args=self.get_has_func_attribute_extra_args(name))
+
+ def get_disable_assert_args(self) -> T.List[str]:
+ return ['-DNDEBUG']
diff --git a/meson/mesonbuild/compilers/mixins/compcert.py b/meson/mesonbuild/compilers/mixins/compcert.py
new file mode 100644
index 000000000..3211f6af2
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/compcert.py
@@ -0,0 +1,131 @@
+# Copyright 2012-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Representations specific to the CompCert C compiler family."""
+
+import os
+import re
+import typing as T
+
+if T.TYPE_CHECKING:
+ from ...environment import Environment
+ from ...compilers.compilers import Compiler
+else:
+ # This is a bit clever: for mypy we pretend that these mixins descend from
+ # Compiler, so we get all of the methods and attributes defined for us, but
+ # for runtime we make them descend from object (which all classes normally
+ # do). This gives us DRYer type checking, with no runtime impact
+ Compiler = object
+
+ccomp_buildtype_args = {
+ 'plain': [],
+ 'debug': ['-O0', '-g'],
+ 'debugoptimized': ['-O0', '-g'],
+ 'release': ['-O3'],
+ 'minsize': ['-Os'],
+ 'custom': ['-Obranchless'],
+} # type: T.Dict[str, T.List[str]]
+
+ccomp_optimization_args = {
+ '0': ['-O0'],
+ 'g': ['-O0'],
+ '1': ['-O1'],
+ '2': ['-O2'],
+ '3': ['-O3'],
+ 's': ['-Os']
+} # type: T.Dict[str, T.List[str]]
+
+ccomp_debug_args = {
+ False: [],
+ True: ['-g']
+} # type: T.Dict[bool, T.List[str]]
+
+# As of CompCert 20.04, these arguments should be passed to the underlying gcc linker (via -WUl,<arg>)
+# There are probably (many) more, but these are those used by picolibc
+ccomp_args_to_wul = [
+ r"^-ffreestanding$",
+ r"^-r$"
+] # type: T.List[str]
+
+class CompCertCompiler(Compiler):
+
+ def __init__(self) -> None:
+ self.id = 'ccomp'
+ # Assembly
+ self.can_compile_suffixes.add('s')
+ default_warn_args = [] # type: T.List[str]
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + [],
+ '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]
+
+ def get_always_args(self) -> T.List[str]:
+ return []
+
+ def get_pic_args(self) -> T.List[str]:
+ # As of now, CompCert does not support PIC
+ return []
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return ccomp_buildtype_args[buildtype]
+
+ def get_pch_suffix(self) -> str:
+ return 'pch'
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ return []
+
+ def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:
+ "Always returns a copy that can be independently mutated"
+ patched_args = [] # type: T.List[str]
+ for arg in args:
+ added = 0
+ for ptrn in ccomp_args_to_wul:
+ if re.match(ptrn, arg):
+ patched_args.append('-WUl,' + arg)
+ added = 1
+ if not added:
+ patched_args.append(arg)
+ return patched_args
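+
+ # e.g. ['-ffreestanding', '-O2'] -> ['-WUl,-ffreestanding', '-O2'],
+ # so the flag reaches the underlying gcc linker instead of ccomp.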
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ return []
+
+ def get_preprocess_only_args(self) -> T.List[str]:
+ return ['-E']
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return ['-c']
+
+ def get_coverage_args(self) -> T.List[str]:
+ return []
+
+ def get_no_stdinc_args(self) -> T.List[str]:
+ return ['-nostdinc']
+
+ def get_no_stdlib_link_args(self) -> T.List[str]:
+ return ['-nostdlib']
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return ccomp_optimization_args[optimization_level]
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return ccomp_debug_args[is_debug]
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
diff --git a/meson/mesonbuild/compilers/mixins/elbrus.py b/meson/mesonbuild/compilers/mixins/elbrus.py
new file mode 100644
index 000000000..16f621005
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/elbrus.py
@@ -0,0 +1,82 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstractions for the Elbrus family of compilers."""
+
+import os
+import typing as T
+import subprocess
+import re
+
+from .gnu import GnuLikeCompiler
+from .gnu import gnu_optimization_args
+from ...mesonlib import Popen_safe, OptionKey
+
+if T.TYPE_CHECKING:
+ from ...environment import Environment
+
+
+class ElbrusCompiler(GnuLikeCompiler):
+ # Elbrus compiler is nearly like GCC, but does not support
+ # PCH, LTO, sanitizers and color output as of version 1.21.x.
+
+ def __init__(self) -> None:
+ super().__init__()
+ self.id = 'lcc'
+ self.base_options = {OptionKey(o) for o in ['b_pgo', 'b_coverage', 'b_ndebug', 'b_staticpic', 'b_lundef', 'b_asneeded']}
+
+ # FIXME: use _build_wrapper to call this so that linker flags from the env
+ # get applied
+ def get_library_dirs(self, env: 'Environment', elf_class: T.Optional[int] = None) -> T.List[str]:
+ os_env = os.environ.copy()
+ os_env['LC_ALL'] = 'C'
+ stdo = Popen_safe(self.exelist + ['--print-search-dirs'], env=os_env)[1]
+ for line in stdo.split('\n'):
+ if line.startswith('libraries:'):
+ # lcc does not include '=' in --print-search-dirs output. Also it could show nonexistent dirs.
+ libstr = line.split(' ', 1)[1]
+ return [os.path.realpath(p) for p in libstr.split(':') if os.path.exists(p)]
+ return []
+
+ def get_program_dirs(self, env: 'Environment') -> T.List[str]:
+ os_env = os.environ.copy()
+ os_env['LC_ALL'] = 'C'
+ stdo = Popen_safe(self.exelist + ['--print-search-dirs'], env=os_env)[1]
+ for line in stdo.split('\n'):
+ if line.startswith('programs:'):
+ # lcc does not include '=' in --print-search-dirs output.
+ libstr = line.split(' ', 1)[1]
+ return [os.path.realpath(p) for p in libstr.split(':')]
+ return []
+
+ def get_default_include_dirs(self) -> T.List[str]:
+ os_env = os.environ.copy()
+ os_env['LC_ALL'] = 'C'
+ p = subprocess.Popen(self.exelist + ['-xc', '-E', '-v', '-'], env=os_env, stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stderr = p.stderr.read().decode('utf-8', errors='replace')
+ includes = []
+ for line in stderr.split('\n'):
+ if line.lstrip().startswith('--sys_include'):
+ includes.append(re.sub(r'\s*\\$', '', re.sub(r'^\s*--sys_include\s*', '', line)))
+ return includes
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return gnu_optimization_args[optimization_level]
+
+ def get_pch_suffix(self) -> str:
+ # PCH is not actually supported for now, but it will probably be supported in the future
+ return 'pch'
+
+ def openmp_flags(self) -> T.List[str]:
+ return ['-fopenmp']
diff --git a/meson/mesonbuild/compilers/mixins/emscripten.py b/meson/mesonbuild/compilers/mixins/emscripten.py
new file mode 100644
index 000000000..226cc1531
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/emscripten.py
@@ -0,0 +1,69 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Provides a mixin for shared code between C and C++ Emscripten compilers."""
+
+import os.path
+import typing as T
+
+from ... import coredata
+from ...mesonlib import OptionKey
+
+if T.TYPE_CHECKING:
+ from ...environment import Environment
+ from ...compilers.compilers import Compiler
+else:
+ # This is a bit clever: for mypy we pretend that these mixins descend from
+ # Compiler, so we get all of the methods and attributes defined for us, but
+ # for runtime we make them descend from object (which all classes normally
+ # do). This gives us DRYer type checking, with no runtime impact
+ Compiler = object
+
+
+class EmscriptenMixin(Compiler):
+
+ def _get_compile_output(self, dirname: str, mode: str) -> T.Optional[str]:
+ # In pre-processor mode, the output is sent to stdout and discarded
+ if mode == 'preprocess':
+ return None
+ # Unlike sane toolchains, emcc infers the kind of output from its name.
+ # This is the only reason why this method is overridden; compiler tests
+ # do not work well with the default exe/obj suffixes.
+ if mode == 'link':
+ suffix = 'js'
+ else:
+ suffix = 'o'
+ return os.path.join(dirname, 'output.' + suffix)
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ return ['-s', 'USE_PTHREADS=1']
+
+ def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+ args = ['-s', 'USE_PTHREADS=1']
+ count: int = env.coredata.options[OptionKey('thread_count', lang=self.language, machine=self.for_machine)].value
+ if count:
+ args.extend(['-s', f'PTHREAD_POOL_SIZE={count}'])
+ return args
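+
+ # With the default thread_count of 4 set below, this yields (illustrative):
+ # ['-s', 'USE_PTHREADS=1', '-s', 'PTHREAD_POOL_SIZE=4']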
+
+ def get_options(self) -> 'coredata.KeyedOptionDictType':
+ opts = super().get_options()
+ key = OptionKey('thread_count', machine=self.for_machine, lang=self.language)
+ opts.update({
+ key: coredata.UserIntegerOption(
+ 'Number of threads to use in WebAssembly, set to 0 to disable',
+ (0, None, 4), # Default was picked at random
+ ),
+ })
+
+ return opts
diff --git a/meson/mesonbuild/compilers/mixins/gnu.py b/meson/mesonbuild/compilers/mixins/gnu.py
new file mode 100644
index 000000000..bc40af494
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/gnu.py
@@ -0,0 +1,398 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Provides mixins for GNU compilers and GNU-like compilers."""
+
+import abc
+import functools
+import os
+import multiprocessing
+import pathlib
+import re
+import subprocess
+import typing as T
+
+from ... import mesonlib
+from ... import mlog
+from ...mesonlib import OptionKey
+
+if T.TYPE_CHECKING:
+ from ..._typing import ImmutableListProtocol
+ from ...environment import Environment
+ from ..compilers import Compiler
+else:
+ # This is a bit clever: for mypy we pretend that these mixins descend from
+ # Compiler, so we get all of the methods and attributes defined for us, but
+ # for runtime we make them descend from object (which all classes normally
+ # do). This gives us DRYer type checking, with no runtime impact
+ Compiler = object
+
+# XXX: prevent circular references.
+# FIXME: this really is a posix interface not a c-like interface
+clike_debug_args = {
+ False: [],
+ True: ['-g'],
+} # type: T.Dict[bool, T.List[str]]
+
+gnulike_buildtype_args = {
+ 'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+} # type: T.Dict[str, T.List[str]]
+
+gnu_optimization_args = {
+ '0': [],
+ 'g': ['-Og'],
+ '1': ['-O1'],
+ '2': ['-O2'],
+ '3': ['-O3'],
+ 's': ['-Os'],
+} # type: T.Dict[str, T.List[str]]
+
+gnulike_instruction_set_args = {
+ 'mmx': ['-mmmx'],
+ 'sse': ['-msse'],
+ 'sse2': ['-msse2'],
+ 'sse3': ['-msse3'],
+ 'ssse3': ['-mssse3'],
+ 'sse41': ['-msse4.1'],
+ 'sse42': ['-msse4.2'],
+ 'avx': ['-mavx'],
+ 'avx2': ['-mavx2'],
+ 'neon': ['-mfpu=neon'],
+} # type: T.Dict[str, T.List[str]]
+
+gnu_symbol_visibility_args = {
+ '': [],
+ 'default': ['-fvisibility=default'],
+ 'internal': ['-fvisibility=internal'],
+ 'hidden': ['-fvisibility=hidden'],
+ 'protected': ['-fvisibility=protected'],
+ 'inlineshidden': ['-fvisibility=hidden', '-fvisibility-inlines-hidden'],
+} # type: T.Dict[str, T.List[str]]
+
+gnu_color_args = {
+ 'auto': ['-fdiagnostics-color=auto'],
+ 'always': ['-fdiagnostics-color=always'],
+ 'never': ['-fdiagnostics-color=never'],
+} # type: T.Dict[str, T.List[str]]
+
+
+@functools.lru_cache(maxsize=None)
+def gnulike_default_include_dirs(compiler: T.Tuple[str, ...], lang: str) -> 'ImmutableListProtocol[str]':
+ lang_map = {
+ 'c': 'c',
+ 'cpp': 'c++',
+ 'objc': 'objective-c',
+ 'objcpp': 'objective-c++'
+ }
+ if lang not in lang_map:
+ return []
+ lang = lang_map[lang]
+ env = os.environ.copy()
+ env["LC_ALL"] = 'C'
+ cmd = list(compiler) + [f'-x{lang}', '-E', '-v', '-']
+ p = subprocess.Popen(
+ cmd,
+ stdin=subprocess.DEVNULL,
+ stderr=subprocess.STDOUT,
+ stdout=subprocess.PIPE,
+ env=env
+ )
+ stdout = p.stdout.read().decode('utf-8', errors='replace')
+ parse_state = 0
+ paths = [] # type: T.List[str]
+ for line in stdout.split('\n'):
+ line = line.strip(' \n\r\t')
+ if parse_state == 0:
+ if line == '#include "..." search starts here:':
+ parse_state = 1
+ elif parse_state == 1:
+ if line == '#include <...> search starts here:':
+ parse_state = 2
+ else:
+ paths.append(line)
+ elif parse_state == 2:
+ if line == 'End of search list.':
+ break
+ else:
+ paths.append(line)
+ if not paths:
+ mlog.warning('No include directory found parsing "{cmd}" output'.format(cmd=" ".join(cmd)))
+ # Append a normalized copy of paths to make path lookup easier
+ paths += [os.path.normpath(x) for x in paths]
+ return paths
+
+
+class GnuLikeCompiler(Compiler, metaclass=abc.ABCMeta):
+ """
+ GnuLikeCompiler is a common interface to all compilers implementing
+ the GNU-style commandline interface. This includes GCC, Clang
+ and ICC. Certain functionality between them is different and requires
+ that each concrete subclass define its own implementation.
+ """
+
+ LINKER_PREFIX = '-Wl,'
+
+ def __init__(self) -> None:
+ self.base_options = {
+ OptionKey(o) for o in ['b_pch', 'b_lto', 'b_pgo', 'b_coverage',
+ 'b_ndebug', 'b_staticpic', 'b_pie']}
+ if not (self.info.is_windows() or self.info.is_cygwin() or self.info.is_openbsd()):
+ self.base_options.add(OptionKey('b_lundef'))
+ if not self.info.is_windows() or self.info.is_cygwin():
+ self.base_options.add(OptionKey('b_asneeded'))
+ if not self.info.is_hurd():
+ self.base_options.add(OptionKey('b_sanitize'))
+ # All GCC-like backends can do assembly
+ self.can_compile_suffixes.add('s')
+
+ def get_pic_args(self) -> T.List[str]:
+ if self.info.is_windows() or self.info.is_cygwin() or self.info.is_darwin():
+ return [] # On Windows and macOS, PIC is always on.
+ return ['-fPIC']
+
+ def get_pie_args(self) -> T.List[str]:
+ return ['-fPIE']
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return gnulike_buildtype_args[buildtype]
+
+ @abc.abstractmethod
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ pass
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return clike_debug_args[is_debug]
+
+ @abc.abstractmethod
+ def get_pch_suffix(self) -> str:
+ pass
+
+ def split_shlib_to_parts(self, fname: str) -> T.Tuple[str, str]:
+ return os.path.dirname(fname), fname
+
+ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]:
+ return gnulike_instruction_set_args.get(instruction_set, None)
+
+ def get_default_include_dirs(self) -> T.List[str]:
+ return gnulike_default_include_dirs(tuple(self.exelist), self.language).copy()
+
+ @abc.abstractmethod
+ def openmp_flags(self) -> T.List[str]:
+ pass
+
+ def gnu_symbol_visibility_args(self, vistype: str) -> T.List[str]:
+ return gnu_symbol_visibility_args[vistype]
+
+ def gen_vs_module_defs_args(self, defsfile: str) -> T.List[str]:
+ if not isinstance(defsfile, str):
+ raise RuntimeError('Module definitions file should be str')
+ # On Windows targets, .def files may be specified on the linker command
+ # line like an object file.
+ if self.info.is_windows() or self.info.is_cygwin():
+ return [defsfile]
+ # For other targets, discard the .def file.
+ return []
+
+ def get_argument_syntax(self) -> str:
+ return 'gcc'
+
+ def get_profile_generate_args(self) -> T.List[str]:
+ return ['-fprofile-generate']
+
+ def get_profile_use_args(self) -> T.List[str]:
+ return ['-fprofile-use', '-fprofile-correction']
+
+ def get_gui_app_args(self, value: bool) -> T.List[str]:
+ return ['-mwindows' if value else '-mconsole']
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
+
+ @functools.lru_cache()
+ def _get_search_dirs(self, env: 'Environment') -> str:
+ extra_args = ['--print-search-dirs']
+ with self._build_wrapper('', env, extra_args=extra_args,
+ dependencies=None, mode='compile',
+ want_output=True) as p:
+ return p.stdout
+
+ def _split_fetch_real_dirs(self, pathstr: str) -> T.List[str]:
+ # We need to use the path separator used by the compiler for printing
+ # lists of paths ("gcc --print-search-dirs"). By default
+ # we assume it uses the platform native separator.
+ pathsep = os.pathsep
+
+ # clang uses ':' instead of ';' on Windows https://reviews.llvm.org/D61121
+ # so we need to repair things like 'C:\foo:C:\bar'
+ if pathsep == ';':
+ pathstr = re.sub(r':([^/\\])', r';\1', pathstr)
+
+ # pathlib treats empty paths as '.', so filter those out
+ paths = [p for p in pathstr.split(pathsep) if p]
+
+ result = []
+ for p in paths:
+ # GCC returns paths like this:
+ # /usr/lib/gcc/x86_64-linux-gnu/8/../../../../x86_64-linux-gnu/lib
+ # It would make sense to normalize them to get rid of the .. parts
+ # Sadly when you are on a merged /usr fs it also kills these:
+ # /lib/x86_64-linux-gnu
+ # since /lib is a symlink to /usr/lib. This would mean
+ # paths under /lib would be considered not a "system path",
+ # which is wrong and breaks things. Store everything, just to be sure.
+ pobj = pathlib.Path(p)
+ unresolved = pobj.as_posix()
+ if pobj.exists():
+ if unresolved not in result:
+ result.append(unresolved)
+ try:
+ resolved = pathlib.Path(p).resolve().as_posix()
+ if resolved not in result:
+ result.append(resolved)
+ except FileNotFoundError:
+ pass
+ return result
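+
+ # e.g. on Windows with clang (illustrative): 'C:\foo:C:\bar' is first
+ # repaired to 'C:\foo;C:\bar' and then split on ';'.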
+
+ def get_compiler_dirs(self, env: 'Environment', name: str) -> T.List[str]:
+ '''
+ Get dirs from the compiler, either `libraries:` or `programs:`
+ '''
+ stdo = self._get_search_dirs(env)
+ for line in stdo.split('\n'):
+ if line.startswith(name + ':'):
+ return self._split_fetch_real_dirs(line.split('=', 1)[1])
+ return []
+
+ def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
+ # This provides a base for many compilers, GCC and Clang override this
+ # for their specific arguments
+ return ['-flto']
+
+ def sanitizer_compile_args(self, value: str) -> T.List[str]:
+ if value == 'none':
+ return []
+ args = ['-fsanitize=' + value]
+ if 'address' in value: # for -fsanitize=address,undefined
+ args.append('-fno-omit-frame-pointer')
+ return args
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return ['-o', target]
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ return ['-MD', '-MQ', outtarget, '-MF', outfile]
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return ['-c']
+
+ def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+ if not path:
+ path = '.'
+ if is_system:
+ return ['-isystem' + path]
+ return ['-I' + path]
+
+ @classmethod
+ def use_linker_args(cls, linker: str) -> T.List[str]:
+ if linker not in {'gold', 'bfd', 'lld'}:
+ raise mesonlib.MesonException(
+ f'Unsupported linker, only bfd, gold, and lld are supported, not {linker}.')
+ return [f'-fuse-ld={linker}']
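+
+ # e.g. use_linker_args('gold') -> ['-fuse-ld=gold']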
+
+ def get_coverage_args(self) -> T.List[str]:
+ return ['--coverage']
+
+
+class GnuCompiler(GnuLikeCompiler):
+ """
+ GnuCompiler represents an actual GCC in its many incarnations.
+ Compilers imitating GCC (Clang/Intel) should use the GnuLikeCompiler ABC.
+ """
+
+ def __init__(self, defines: T.Optional[T.Dict[str, str]]):
+ super().__init__()
+ self.id = 'gcc'
+ self.defines = defines or {}
+ self.base_options.update({OptionKey('b_colorout'), OptionKey('b_lto_threads')})
+
+ def get_colorout_args(self, colortype: str) -> T.List[str]:
+ if mesonlib.version_compare(self.version, '>=4.9.0'):
+ return gnu_color_args[colortype][:]
+ return []
+
+ def get_warn_args(self, level: str) -> T.List[str]:
+ # Mypy doesn't understand cooperative inheritance
+ args = super().get_warn_args(level)
+ if mesonlib.version_compare(self.version, '<4.8.0') and '-Wpedantic' in args:
+ # -Wpedantic was added in 4.8.0
+ # https://gcc.gnu.org/gcc-4.8/changes.html
+ args[args.index('-Wpedantic')] = '-pedantic'
+ return args
+
+ def has_builtin_define(self, define: str) -> bool:
+ return define in self.defines
+
+ def get_builtin_define(self, define: str) -> T.Optional[str]:
+ if define in self.defines:
+ return self.defines[define]
+ return None
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return gnu_optimization_args[optimization_level]
+
+ def get_pch_suffix(self) -> str:
+ return 'gch'
+
+ def openmp_flags(self) -> T.List[str]:
+ return ['-fopenmp']
+
+ def has_arguments(self, args: T.List[str], env: 'Environment', code: str,
+ mode: str) -> T.Tuple[bool, bool]:
+ # For some compiler command line arguments, the GNU compilers will
+ # emit a warning on stderr indicating that an option is valid for
+ # another language, but still complete with exit_success
+ with self._build_wrapper(code, env, args, None, mode) as p:
+ result = p.returncode == 0
+ if self.language in {'cpp', 'objcpp'} and 'is valid for C/ObjC' in p.stderr:
+ result = False
+ if self.language in {'c', 'objc'} and 'is valid for C++/ObjC++' in p.stderr:
+ result = False
+ return result, p.cached
+
+ def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+ # GCC only warns about unknown or ignored attributes, so force an
+ # error.
+ return ['-Werror=attributes']
+
+ def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.List[str]:
+ return ['-r', '-o', prelink_name] + obj_list
+
+ def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
+ if threads == 0:
+ if mesonlib.version_compare(self.version, '>= 10.0'):
+ return ['-flto=auto']
+ # This matches clang's behavior of using the number of cpus
+ return [f'-flto={multiprocessing.cpu_count()}']
+ elif threads > 0:
+ return [f'-flto={threads}']
+ return super().get_lto_compile_args(threads=threads)
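+
+ # e.g. (illustrative) GCC 10+ with threads=0 gives ['-flto=auto'], an
+ # older GCC on an 8-core machine gives ['-flto=8'], and threads=2 gives
+ # ['-flto=2'].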
diff --git a/meson/mesonbuild/compilers/mixins/intel.py b/meson/mesonbuild/compilers/mixins/intel.py
new file mode 100644
index 000000000..89f351854
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/intel.py
@@ -0,0 +1,189 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstractions for the Intel Compiler families.
+
+Intel provides both a posix/gcc-like compiler (ICC) for macOS and Linux,
+handled by the IntelGnuLikeCompiler mixin, and an msvc-like compiler (ICL)
+for Windows, handled by the IntelVisualStudioLikeCompiler mixin.
+"""
+
+import os
+import typing as T
+
+from ... import mesonlib
+from ..compilers import CompileCheckMode
+from .gnu import GnuLikeCompiler
+from .visualstudio import VisualStudioLikeCompiler
+
+if T.TYPE_CHECKING:
+ from ...arglist import CompilerArgs
+ from ...dependencies import Dependency
+ from ...environment import Environment
+
+# XXX: avoid circular dependencies
+# TODO: this belongs in a posix compiler class
+# NOTE: the default Intel optimization is -O2, unlike GNU which defaults to -O0.
+# this can be surprising, particularly for debug builds, so we specify the
+# default as -O0.
+# https://software.intel.com/en-us/cpp-compiler-developer-guide-and-reference-o
+# https://software.intel.com/en-us/cpp-compiler-developer-guide-and-reference-g
+# https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-o
+# https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-g
+# https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-traceback
+# https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html
+
+
+class IntelGnuLikeCompiler(GnuLikeCompiler):
+ """
+ Tested on linux for ICC 14.0.3, 15.0.6, 16.0.4, 17.0.1, 19.0
+ debugoptimized: -g -O2
+ release: -O3
+ minsize: -O2
+ """
+
+ BUILD_ARGS = {
+ 'plain': [],
+ 'debug': ["-g", "-traceback"],
+ 'debugoptimized': ["-g", "-traceback"],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+ } # type: T.Dict[str, T.List[str]]
+
+ OPTIM_ARGS = {
+ '0': ['-O0'],
+ 'g': ['-O0'],
+ '1': ['-O1'],
+ '2': ['-O2'],
+ '3': ['-O3'],
+ 's': ['-Os'],
+ }
+
+ def __init__(self) -> None:
+ super().__init__()
+ # As of 19.0.0 ICC doesn't have sanitizer, color, or lto support.
+ #
+ # It does have IPO, which serves much the same purpose as LTO, but
+ # there is an unfortunate rule for using IPO (you can't control the
+ # name of the output file) which breaks assumptions meson makes
+ self.base_options = {mesonlib.OptionKey(o) for o in [
+ 'b_pch', 'b_lundef', 'b_asneeded', 'b_pgo', 'b_coverage',
+ 'b_ndebug', 'b_staticpic', 'b_pie']}
+ self.id = 'intel'
+ self.lang_header = 'none'
+
+ def get_pch_suffix(self) -> str:
+ return 'pchi'
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ return ['-pch', '-pch_dir', os.path.join(pch_dir), '-x',
+ self.lang_header, '-include', header, '-x', 'none']
+
+ def get_pch_name(self, header_name: str) -> str:
+ return os.path.basename(header_name) + '.' + self.get_pch_suffix()
+
+ def openmp_flags(self) -> T.List[str]:
+ if mesonlib.version_compare(self.version, '>=15.0.0'):
+ return ['-qopenmp']
+ else:
+ return ['-openmp']
+
+ def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+ extra_args = [
+ '-diag-error', '10006', # ignoring unknown option
+ '-diag-error', '10148', # Option not supported
+ '-diag-error', '10155', # ignoring argument required
+ '-diag-error', '10156', # ignoring not argument allowed
+ '-diag-error', '10157', # Ignoring argument of the wrong type
+ '-diag-error', '10158', # Argument must be separate. Can be hit by trying an option like -foo-bar=foo when -foo=bar is a valid option but -foo-bar isn't
+ ]
+ return super().get_compiler_check_args(mode) + extra_args
+
+ def get_profile_generate_args(self) -> T.List[str]:
+ return ['-prof-gen=threadsafe']
+
+ def get_profile_use_args(self) -> T.List[str]:
+ return ['-prof-use']
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return self.BUILD_ARGS[buildtype]
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return self.OPTIM_ARGS[optimization_level]
+
+ def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+ return ['-diag-error', '1292']
+
+
+class IntelVisualStudioLikeCompiler(VisualStudioLikeCompiler):
+
+ """Abstractions for ICL, the Intel compiler on Windows."""
+
+ BUILD_ARGS = {
+ 'plain': [],
+ 'debug': ["/Zi", "/traceback"],
+ 'debugoptimized': ["/Zi", "/traceback"],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+ } # type: T.Dict[str, T.List[str]]
+
+ OPTIM_ARGS = {
+ '0': ['/Od'],
+ 'g': ['/Od'],
+ '1': ['/O1'],
+ '2': ['/O2'],
+ '3': ['/O3'],
+ 's': ['/Os'],
+ }
+
+ def __init__(self, target: str) -> None:
+ super().__init__(target)
+ self.id = 'intel-cl'
+
+ def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+ args = super().get_compiler_check_args(mode)
+ if mode is not CompileCheckMode.LINK:
+ args.extend([
+ '/Qdiag-error:10006', # ignoring unknown option
+ '/Qdiag-error:10148', # Option not supported
+ '/Qdiag-error:10155', # ignoring argument required
+ '/Qdiag-error:10156', # ignoring not argument allowed
+ '/Qdiag-error:10157', # Ignoring argument of the wrong type
+ '/Qdiag-error:10158', # Argument must be separate. Can be hit by trying an option like -foo-bar=foo when -foo=bar is a valid option but -foo-bar isn't
+ ])
+ return args
+
+ def get_toolset_version(self) -> T.Optional[str]:
+ # ICL provides a cl.exe that returns the version of MSVC it tries to
+ # emulate, so we'll get the version from that and pass it to the same
+ # function the real MSVC uses to calculate the toolset version.
+ _, _, err = mesonlib.Popen_safe(['cl.exe'])
+ v1, v2, *_ = mesonlib.search_version(err).split('.')
+ version = int(v1 + v2)
+ return self._calculate_toolset_version(version)
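+
+ # e.g. (illustrative) if the emulated cl.exe reports version 19.29.x,
+ # this computes 1929 and maps it to the 'v142' toolset.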
+
+ def openmp_flags(self) -> T.List[str]:
+ return ['/Qopenmp']
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return self.BUILD_ARGS[buildtype]
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return self.OPTIM_ARGS[optimization_level]
+
+ def get_pch_base_name(self, header: str) -> str:
+ return os.path.basename(header)
\ No newline at end of file
diff --git a/meson/mesonbuild/compilers/mixins/islinker.py b/meson/mesonbuild/compilers/mixins/islinker.py
new file mode 100644
index 000000000..4c29f8c0d
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/islinker.py
@@ -0,0 +1,129 @@
+# Copyright 2019 The Meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Mixins for compilers that *are* linkers.
+
+While many compilers (such as gcc and clang) are used by meson to dispatch
+linker commands and others (like MSVC) are not, a few (such as DMD) actually
+are both the linker and compiler in one binary. This module provides mixin
+classes for those cases.
+"""
+
+import typing as T
+
+from ...mesonlib import EnvironmentException, MesonException, is_windows
+
+if T.TYPE_CHECKING:
+ from ...coredata import KeyedOptionDictType
+ from ...environment import Environment
+ from ...compilers.compilers import Compiler
+else:
+ # This is a bit clever: for mypy we pretend that these mixins descend from
+ # Compiler, so we get all of the methods and attributes defined for us, but
+ # for runtime we make them descend from object (which all classes normally
+ # do). This gives us DRYer type checking, with no runtime impact
+ Compiler = object
+
+
+class BasicLinkerIsCompilerMixin(Compiler):
+
+ """Provides a baseline of methods that a linker would implement.
+
+ In every case this provides a "no" or "empty" answer. If a compiler
+ implements any of these it needs a different mixin or to override that
+ functionality itself.
+ """
+
+ def sanitizer_link_args(self, value: str) -> T.List[str]:
+ return []
+
+ def get_lto_link_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
+ return []
+
+ def can_linker_accept_rsp(self) -> bool:
+ return is_windows()
+
+ def get_linker_exelist(self) -> T.List[str]:
+ return self.exelist.copy()
+
+ def get_linker_output_args(self, output: str) -> T.List[str]:
+ return []
+
+ def get_linker_always_args(self) -> T.List[str]:
+ return []
+
+ def get_linker_lib_prefix(self) -> str:
+ return ''
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return []
+
+ def has_multi_link_args(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+ return False, False
+
+ def get_link_debugfile_args(self, targetfile: str) -> T.List[str]:
+ return []
+
+ def get_std_shared_lib_link_args(self) -> T.List[str]:
+ return []
+
+ def get_std_shared_module_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return self.get_std_shared_lib_link_args()
+
+ def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+ raise EnvironmentException(f'Linker {self.id} does not support link_whole')
+
+ def get_allow_undefined_link_args(self) -> T.List[str]:
+ raise EnvironmentException(f'Linker {self.id} does not support allow undefined')
+
+ def get_pie_link_args(self) -> T.List[str]:
+ raise EnvironmentException(f'Linker {self.id} does not support position-independent executable')
+
+ def get_undefined_link_args(self) -> T.List[str]:
+ return []
+
+ def get_coverage_link_args(self) -> T.List[str]:
+ return []
+
+ def no_undefined_link_args(self) -> T.List[str]:
+ return []
+
+ def bitcode_args(self) -> T.List[str]:
+ raise MesonException("This linker doesn't support bitcode bundles")
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str,
+ darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ raise MesonException("This linker doesn't support soname args")
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
+
+ def get_asneeded_args(self) -> T.List[str]:
+ return []
+
+ def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
+ return []
+
+ def get_link_debugfile_name(self, target: str) -> str:
+ return ''
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ return []
+
+ def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+ return []
diff --git a/meson/mesonbuild/compilers/mixins/pgi.py b/meson/mesonbuild/compilers/mixins/pgi.py
new file mode 100644
index 000000000..51de8afa5
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/pgi.py
@@ -0,0 +1,109 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstractions for the PGI family of compilers."""
+
+import typing as T
+import os
+from pathlib import Path
+
+from ..compilers import clike_debug_args, clike_optimization_args
+from ...mesonlib import OptionKey
+
+if T.TYPE_CHECKING:
+ from ...environment import Environment
+ from ...compilers.compilers import Compiler
+else:
+ # This is a bit clever, for mypy we pretend that these mixins descend from
+ # Compiler, so we get all of the methods and attributes defined for us, but
+ # for runtime we make them descend from object (which all classes normally
+ # do). This gives us DRYer type checking, with no runtime impact
+ Compiler = object
+
+pgi_buildtype_args = {
+ 'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+} # type: T.Dict[str, T.List[str]]
+
+
+class PGICompiler(Compiler):
+
+ def __init__(self) -> None:
+ self.base_options = {OptionKey('b_pch')}
+ self.id = 'pgi'
+
+ default_warn_args = ['-Minform=inform']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args,
+ '3': default_warn_args
+ } # type: T.Dict[str, T.List[str]]
+
+ def get_module_incdir_args(self) -> T.Tuple[str]:
+ return ('-module', )
+
+ def get_no_warn_args(self) -> T.List[str]:
+ return ['-silent']
+
+ def gen_import_library_args(self, implibname: str) -> T.List[str]:
+ return []
+
+ def get_pic_args(self) -> T.List[str]:
+ # PGI -fPIC is Linux only.
+ if self.info.is_linux():
+ return ['-fPIC']
+ return []
+
+ def openmp_flags(self) -> T.List[str]:
+ return ['-mp']
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return pgi_buildtype_args[buildtype]
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return clike_optimization_args[optimization_level]
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return clike_debug_args[is_debug]
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+ return parameter_list
+
+ def get_always_args(self) -> T.List[str]:
+ return []
+
+ def get_pch_suffix(self) -> str:
+ # PGI defaults to the .pch suffix for PCH on Linux and Windows when given the --pch option
+ return 'pch'
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ # PGI supports PCH for C++ only.
+ hdr = Path(pch_dir).resolve().parent / header
+ if self.language == 'cpp':
+ return ['--pch',
+ '--pch_dir', str(hdr.parent),
+ f'-I{hdr.parent}']
+ else:
+ return []
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ # PGI cannot accept -pthread; it's already threaded
+ return []
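
For illustration, the -I/-L rebasing that compute_parameters_with_absolute_paths performs above, as a self-contained sketch (absolutize is a hypothetical helper, not meson API):

import os

def absolutize(params, build_dir):
    # Prefix relative -I/-L paths with the build directory, as above.
    for idx, i in enumerate(params):
        if i[:2] in ('-I', '-L'):
            params[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
    return params

print(absolutize(['-Iinclude', '-Llib', '-DFOO'], '/tmp/build'))
# ['-I/tmp/build/include', '-L/tmp/build/lib', '-DFOO']
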
diff --git a/meson/mesonbuild/compilers/mixins/visualstudio.py b/meson/mesonbuild/compilers/mixins/visualstudio.py
new file mode 100644
index 000000000..e911f64f4
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/visualstudio.py
@@ -0,0 +1,428 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Abstractions to simplify compilers that implement an MSVC compatible
+interface.
+"""
+
+import abc
+import os
+import typing as T
+
+from ... import arglist
+from ... import mesonlib
+from ... import mlog
+
+if T.TYPE_CHECKING:
+ from ...environment import Environment
+ from .clike import CLikeCompiler as Compiler
+else:
+ # This is a bit clever, for mypy we pretend that these mixins descend from
+ # Compiler, so we get all of the methods and attributes defined for us, but
+ # for runtime we make them descend from object (which all classes normally
+ # do). This gives us DRYer type checking, with no runtime impact
+ Compiler = object
+
+vs32_instruction_set_args = {
+ 'mmx': ['/arch:SSE'], # There does not seem to be a flag just for MMX
+ 'sse': ['/arch:SSE'],
+ 'sse2': ['/arch:SSE2'],
+ 'sse3': ['/arch:AVX'], # VS leaped from SSE2 directly to AVX.
+ 'sse41': ['/arch:AVX'],
+ 'sse42': ['/arch:AVX'],
+ 'avx': ['/arch:AVX'],
+ 'avx2': ['/arch:AVX2'],
+ 'neon': None,
+} # type: T.Dict[str, T.Optional[T.List[str]]]
+
+# The 64 bit compiler defaults to /arch:avx.
+vs64_instruction_set_args = {
+ 'mmx': ['/arch:AVX'],
+ 'sse': ['/arch:AVX'],
+ 'sse2': ['/arch:AVX'],
+ 'sse3': ['/arch:AVX'],
+ 'ssse3': ['/arch:AVX'],
+ 'sse41': ['/arch:AVX'],
+ 'sse42': ['/arch:AVX'],
+ 'avx': ['/arch:AVX'],
+ 'avx2': ['/arch:AVX2'],
+ 'neon': None,
+} # type: T.Dict[str, T.Optional[T.List[str]]]
+
+msvc_optimization_args = {
+ '0': ['/Od'],
+ 'g': [], # No specific flag to optimize debugging, /Zi or /ZI will create debug information
+ '1': ['/O1'],
+ '2': ['/O2'],
+ '3': ['/O2', '/Gw'],
+ 's': ['/O1', '/Gw'],
+} # type: T.Dict[str, T.List[str]]
+
+msvc_debug_args = {
+ False: [],
+ True: ['/Zi']
+} # type: T.Dict[bool, T.List[str]]
+
+
+class VisualStudioLikeCompiler(Compiler, metaclass=abc.ABCMeta):
+
+ """A common interface for all compilers implementing an MSVC-style
+ interface.
+
+ A number of compilers attempt to mimic MSVC, with varying levels of
+ success, such as Clang-CL and ICL (the Intel C/C++ Compiler for Windows).
+ This class implements as much common logic as possible.
+ """
+
+ std_warn_args = ['/W3']
+ std_opt_args = ['/O2']
+ ignore_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS + ['execinfo']
+ internal_libs = [] # type: T.List[str]
+
+ crt_args = {
+ 'none': [],
+ 'md': ['/MD'],
+ 'mdd': ['/MDd'],
+ 'mt': ['/MT'],
+ 'mtd': ['/MTd'],
+ } # type: T.Dict[str, T.List[str]]
+
+ # /showIncludes is needed for build dependency tracking in Ninja
+ # See: https://ninja-build.org/manual.html#_deps
+ always_args = ['/nologo', '/showIncludes']
+ warn_args = {
+ '0': [],
+ '1': ['/W2'],
+ '2': ['/W3'],
+ '3': ['/W4'],
+ } # type: T.Dict[str, T.List[str]]
+
+ INVOKES_LINKER = False
+
+ def __init__(self, target: str):
+ self.base_options = {mesonlib.OptionKey(o) for o in ['b_pch', 'b_ndebug', 'b_vscrt']} # FIXME add lto, pgo and the like
+ self.target = target
+ self.is_64 = ('x64' in target) or ('x86_64' in target)
+ # do some canonicalization of target machine
+ if 'x86_64' in target:
+ self.machine = 'x64'
+ elif '86' in target:
+ self.machine = 'x86'
+ elif 'aarch64' in target:
+ self.machine = 'arm64'
+ elif 'arm' in target:
+ self.machine = 'arm'
+ else:
+ self.machine = target
+ if mesonlib.version_compare(self.version, '>=19.28.29910'): # VS 16.9.0 includes cl 19.28.29910
+ self.base_options.add(mesonlib.OptionKey('b_sanitize'))
+ assert self.linker is not None
+ self.linker.machine = self.machine
+
+ # Override CCompiler.get_always_args
+ def get_always_args(self) -> T.List[str]:
+ return self.always_args
+
+ def get_pch_suffix(self) -> str:
+ return 'pch'
+
+ def get_pch_name(self, header: str) -> str:
+ chopped = os.path.basename(header).split('.')[:-1]
+ chopped.append(self.get_pch_suffix())
+ pchname = '.'.join(chopped)
+ return pchname
+
+ def get_pch_base_name(self, header: str) -> str:
+ # This needs to be implemented by inheriting classes
+ raise NotImplementedError
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ base = self.get_pch_base_name(header)
+ pchname = self.get_pch_name(header)
+ return ['/FI' + base, '/Yu' + base, '/Fp' + os.path.join(pch_dir, pchname)]
+
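To make the PCH argument shape concrete: for MSVC proper, get_pch_base_name (defined later in this patch) returns the header's basename, so a worked example of the flags produced above is (self-contained sketch, not meson API):

import os

def pch_name(header: str, suffix: str = 'pch') -> str:
    # Mirrors get_pch_name above: replace the extension with the PCH suffix.
    parts = os.path.basename(header).split('.')[:-1]
    parts.append(suffix)
    return '.'.join(parts)

base = os.path.basename('proj/pch.h')  # MSVC's get_pch_base_name behaviour
print(['/FI' + base, '/Yu' + base,
       '/Fp' + os.path.join('build/pch', pch_name('proj/pch.h'))])
# ['/FIpch.h', '/Yupch.h', '/Fpbuild/pch/pch.pch'] (separator is OS-dependent)
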
+ def get_preprocess_only_args(self) -> T.List[str]:
+ return ['/EP']
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return ['/c']
+
+ def get_no_optimization_args(self) -> T.List[str]:
+ return ['/Od', '/Oi-']
+
+ def sanitizer_compile_args(self, value: str) -> T.List[str]:
+ if value == 'none':
+ return []
+ if value != 'address':
+ raise mesonlib.MesonException('VS only supports address sanitizer at the moment.')
+ return ['/fsanitize=address']
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ if target.endswith('.exe'):
+ return ['/Fe' + target]
+ return ['/Fo' + target]
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return []
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return msvc_debug_args[is_debug]
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ args = msvc_optimization_args[optimization_level]
+ if mesonlib.version_compare(self.version, '<18.0'):
+ args = [arg for arg in args if arg != '/Gw']
+ return args
+
+ def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]:
+ return ['/link'] + args
+
+ def get_pic_args(self) -> T.List[str]:
+ return [] # PIC is handled by the loader on Windows
+
+ def gen_vs_module_defs_args(self, defsfile: str) -> T.List[str]:
+ if not isinstance(defsfile, str):
+ raise RuntimeError('Module definitions file should be str')
+ # With MSVC, DLLs only export symbols that are explicitly exported,
+ # so if a module defs file is specified, we use that to export symbols
+ return ['/DEF:' + defsfile]
+
+ def gen_pch_args(self, header: str, source: str, pchname: str) -> T.Tuple[str, T.List[str]]:
+ objname = os.path.splitext(pchname)[0] + '.obj'
+ return objname, ['/Yc' + header, '/Fp' + pchname, '/Fo' + objname]
+
+ def openmp_flags(self) -> T.List[str]:
+ return ['/openmp']
+
+ def openmp_link_flags(self) -> T.List[str]:
+ return []
+
+ # FIXME, no idea what these should be.
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ return []
+
+ @classmethod
+ def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
+ result = []
+ for i in args:
+ # -mms-bitfields is specific to MinGW-GCC
+ # -pthread is only valid for GCC
+ if i in ('-mms-bitfields', '-pthread'):
+ continue
+ if i.startswith('-LIBPATH:'):
+ i = '/LIBPATH:' + i[9:]
+ elif i.startswith('-L'):
+ i = '/LIBPATH:' + i[2:]
+ # Translate GNU-style -lfoo library name to the import library
+ elif i.startswith('-l'):
+ name = i[2:]
+ if name in cls.ignore_libs:
+ # With MSVC, these are provided by the C runtime which is
+ # linked in by default
+ continue
+ else:
+ i = name + '.lib'
+ elif i.startswith('-isystem'):
+ # just use /I for -isystem system include paths
+ if i.startswith('-isystem='):
+ i = '/I' + i[9:]
+ else:
+ i = '/I' + i[8:]
+ elif i.startswith('-idirafter'):
+ # same as -isystem, but appends the path instead
+ if i.startswith('-idirafter='):
+ i = '/I' + i[11:]
+ else:
+ i = '/I' + i[10:]
+ # -pthread in link flags is only used on Linux
+ elif i == '-pthread':
+ continue
+ result.append(i)
+ return result
+
+ @classmethod
+ def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]:
+ result = []
+ for arg in args:
+ if arg.startswith(('/LIBPATH:', '-LIBPATH:')):
+ result.append('-L' + arg[9:])
+ elif arg.endswith(('.a', '.lib')) and not os.path.isabs(arg):
+ result.append('-l' + arg)
+ else:
+ result.append(arg)
+ return result
+
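A self-contained illustration of the GNU-to-MSVC flag translation above, reduced to the -L/-l cases (to_native is a hypothetical helper; the real method also handles -isystem, -idirafter and the ignore_libs set):

def to_native(args):
    out = []
    for a in args:
        if a in ('-mms-bitfields', '-pthread'):
            continue  # GCC-only flags, dropped
        if a.startswith('-L'):
            a = '/LIBPATH:' + a[2:]
        elif a.startswith('-l'):
            a = a[2:] + '.lib'  # -lfoo -> foo.lib import library
        out.append(a)
    return out

print(to_native(['-Lfoo/lib', '-lz', '-pthread']))
# ['/LIBPATH:foo/lib', 'z.lib']
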
+ def get_werror_args(self) -> T.List[str]:
+ return ['/WX']
+
+ def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+ if path == '':
+ path = '.'
+ # msvc does not have a concept of system header dirs.
+ return ['-I' + path]
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '/I':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+ elif i[:9] == '/LIBPATH:':
+ parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
+
+ return parameter_list
+
+ # Visual Studio is special. It ignores some arguments it does not
+ # understand and you can't tell it to error out on those.
+ # http://stackoverflow.com/questions/15259720/how-can-i-make-the-microsoft-c-compiler-treat-unknown-flags-as-errors-rather-t
+ def has_arguments(self, args: T.List[str], env: 'Environment', code: str, mode: str) -> T.Tuple[bool, bool]:
+ warning_text = '4044' if mode == 'link' else '9002'
+ with self._build_wrapper(code, env, extra_args=args, mode=mode) as p:
+ if p.returncode != 0:
+ return False, p.cached
+ return not (warning_text in p.stderr or warning_text in p.stdout), p.cached
+
+ def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]:
+ pdbarr = rel_obj.split('.')[:-1]
+ pdbarr += ['pdb']
+ args = ['/Fd' + '.'.join(pdbarr)]
+ return args
+
+ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]:
+ if self.is_64:
+ return vs64_instruction_set_args.get(instruction_set, None)
+ return vs32_instruction_set_args.get(instruction_set, None)
+
+ def _calculate_toolset_version(self, version: int) -> T.Optional[str]:
+ if version < 1310:
+ return '7.0'
+ elif version < 1400:
+ return '7.1' # (Visual Studio 2003)
+ elif version < 1500:
+ return '8.0' # (Visual Studio 2005)
+ elif version < 1600:
+ return '9.0' # (Visual Studio 2008)
+ elif version < 1700:
+ return '10.0' # (Visual Studio 2010)
+ elif version < 1800:
+ return '11.0' # (Visual Studio 2012)
+ elif version < 1900:
+ return '12.0' # (Visual Studio 2013)
+ elif version < 1910:
+ return '14.0' # (Visual Studio 2015)
+ elif version < 1920:
+ return '14.1' # (Visual Studio 2017)
+ elif version < 1930:
+ return '14.2' # (Visual Studio 2019)
+ mlog.warning(f'Could not find toolset for version {self.version!r}')
+ return None
+
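A worked example of the bucketing above: cl reports a version such as '19.28.29910'; get_toolset_version concatenates the first two components and looks the result up in these ranges:

version = int(''.join('19.28.29910'.split('.')[0:2]))  # -> 1928
assert 1920 <= version < 1930  # falls in the '14.2' bucket (Visual Studio 2019)
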
+ def get_toolset_version(self) -> T.Optional[str]:
+ # See boost/config/compiler/visualc.cpp for up to date mapping
+ try:
+ version = int(''.join(self.version.split('.')[0:2]))
+ except ValueError:
+ return None
+ return self._calculate_toolset_version(version)
+
+ def get_default_include_dirs(self) -> T.List[str]:
+ if 'INCLUDE' not in os.environ:
+ return []
+ return os.environ['INCLUDE'].split(os.pathsep)
+
+ def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ if crt_val in self.crt_args:
+ return self.crt_args[crt_val]
+ assert crt_val in {'from_buildtype', 'static_from_buildtype'}
+ dbg = 'mdd'
+ rel = 'md'
+ if crt_val == 'static_from_buildtype':
+ dbg = 'mtd'
+ rel = 'mt'
+ # Match what build type flags used to do.
+ if buildtype == 'plain':
+ return []
+ elif buildtype == 'debug':
+ return self.crt_args[dbg]
+ elif buildtype == 'debugoptimized':
+ return self.crt_args[rel]
+ elif buildtype == 'release':
+ return self.crt_args[rel]
+ elif buildtype == 'minsize':
+ return self.crt_args[rel]
+ else:
+ assert buildtype == 'custom'
+ raise mesonlib.EnvironmentException('Requested C runtime based on buildtype, but buildtype is "custom".')
+
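For illustration, how the buildtype-derived CRT selection above resolves (standalone sketch; crt_for is a hypothetical reduction of get_crt_compile_args):

crt_args = {'md': ['/MD'], 'mdd': ['/MDd'], 'mt': ['/MT'], 'mtd': ['/MTd']}

def crt_for(crt_val: str, buildtype: str):
    dbg, rel = ('mtd', 'mt') if crt_val == 'static_from_buildtype' else ('mdd', 'md')
    return crt_args[dbg] if buildtype == 'debug' else crt_args[rel]

print(crt_for('from_buildtype', 'debug'))           # ['/MDd']
print(crt_for('static_from_buildtype', 'release'))  # ['/MT']
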
+ def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]:
+ # MSVC doesn't have __attribute__ like Clang and GCC do, so just return
+ # false without compiling anything
+ return name in ['dllimport', 'dllexport'], False
+
+ def get_argument_syntax(self) -> str:
+ return 'msvc'
+
+
+class MSVCCompiler(VisualStudioLikeCompiler):
+
+ """Spcific to the Microsoft Compilers."""
+
+ def __init__(self, target: str):
+ super().__init__(target)
+ self.id = 'msvc'
+
+ def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]:
+ args = super().get_compile_debugfile_args(rel_obj, pch)
+ # When generating a PDB file with PCH, all compile commands write
+ # to the same PDB file. Hence, we need to serialize the PDB
+ # writes using /FS since we do parallel builds. This slows down the
+ # build obviously, which is why we only do this when PCH is on.
+ # This was added in Visual Studio 2013 (MSVC 18.0). Before that it was
+ # always on: https://msdn.microsoft.com/en-us/library/dn502518.aspx
+ if pch and mesonlib.version_compare(self.version, '>=18.0'):
+ args = ['/FS'] + args
+ return args
+
+ def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]:
+ if self.version.split('.')[0] == '16' and instruction_set == 'avx':
+ # VS documentation says that this exists and should work, but
+ # it does not. The headers do not contain AVX intrinsics
+ # and they can not be called.
+ return None
+ return super().get_instruction_set_args(instruction_set)
+
+ def get_pch_base_name(self, header: str) -> str:
+ return os.path.basename(header)
+
+
+class ClangClCompiler(VisualStudioLikeCompiler):
+
+ """Spcific to Clang-CL."""
+
+ def __init__(self, target: str):
+ super().__init__(target)
+ self.id = 'clang-cl'
+
+ # Assembly
+ self.can_compile_suffixes.add('s')
+
+ def has_arguments(self, args: T.List[str], env: 'Environment', code: str, mode: str) -> T.Tuple[bool, bool]:
+ if mode != 'link':
+ args = args + ['-Werror=unknown-argument']
+ return super().has_arguments(args, env, code, mode)
+
+ def get_toolset_version(self) -> T.Optional[str]:
+ # XXX: what is the right thing to do here?
+ return '14.1'
+
+ def get_pch_base_name(self, header: str) -> str:
+ return header
diff --git a/meson/mesonbuild/compilers/mixins/xc16.py b/meson/mesonbuild/compilers/mixins/xc16.py
new file mode 100644
index 000000000..77c4690ff
--- /dev/null
+++ b/meson/mesonbuild/compilers/mixins/xc16.py
@@ -0,0 +1,127 @@
+# Copyright 2012-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Representations specific to the Microchip XC16 C compiler family."""
+
+import os
+import typing as T
+
+from ...mesonlib import EnvironmentException
+
+if T.TYPE_CHECKING:
+ from ...environment import Environment
+ from ...compilers.compilers import Compiler
+else:
+ # This is a bit clever, for mypy we pretend that these mixins descend from
+ # Compiler, so we get all of the methods and attributes defined for us, but
+ # for runtime we make them descend from object (which all classes normally
+ # do). This gives us DRYer type checking, with no runtime impact
+ Compiler = object
+
+xc16_buildtype_args = {
+ 'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+} # type: T.Dict[str, T.List[str]]
+
+xc16_optimization_args = {
+ '0': ['-O0'],
+ 'g': ['-O0'],
+ '1': ['-O1'],
+ '2': ['-O2'],
+ '3': ['-O3'],
+ 's': ['-Os']
+} # type: T.Dict[str, T.List[str]]
+
+xc16_debug_args = {
+ False: [],
+ True: []
+} # type: T.Dict[bool, T.List[str]]
+
+
+class Xc16Compiler(Compiler):
+
+ def __init__(self) -> None:
+ if not self.is_cross:
+ raise EnvironmentException('xc16 supports only cross-compilation.')
+ self.id = 'xc16'
+ # Assembly
+ self.can_compile_suffixes.add('s')
+ default_warn_args = [] # type: T.List[str]
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + [],
+ '3': default_warn_args + []} # type: T.Dict[str, T.List[str]]
+
+ def get_always_args(self) -> T.List[str]:
+ return []
+
+ def get_pic_args(self) -> T.List[str]:
+ # PIC support is not enabled by default for xc16,
+ # if users want to use it, they need to add the required arguments explicitly
+ return []
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return xc16_buildtype_args[buildtype]
+
+ def get_pch_suffix(self) -> str:
+ return 'pch'
+
+ def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+ return []
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ return []
+
+ def get_coverage_args(self) -> T.List[str]:
+ return []
+
+ def get_no_stdinc_args(self) -> T.List[str]:
+ return ['-nostdinc']
+
+ def get_no_stdlib_link_args(self) -> T.List[str]:
+ return ['--nostdlib']
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return xc16_optimization_args[optimization_level]
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return xc16_debug_args[is_debug]
+
+ @classmethod
+ def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
+ result = []
+ for i in args:
+ if i.startswith('-D'):
+ i = '-D' + i[2:]
+ if i.startswith('-I'):
+ i = '-I' + i[2:]
+ if i.startswith('-Wl,-rpath='):
+ continue
+ elif i == '--print-search-dirs':
+ continue
+ elif i.startswith('-L'):
+ continue
+ result.append(i)
+ return result
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
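
A standalone sketch of the host-flag filtering that unix_args_to_native performs above for this embedded toolchain (filter_for_xc16 is a hypothetical name; the real method also passes -D/-I through unchanged):

def filter_for_xc16(args):
    out = []
    for a in args:
        # Drop host-linker flags that xc16 does not understand.
        if a.startswith(('-Wl,-rpath=', '-L')) or a == '--print-search-dirs':
            continue
        out.append(a)
    return out

print(filter_for_xc16(['-DFOO', '-L/usr/lib', '--print-search-dirs']))  # ['-DFOO']
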
diff --git a/meson/mesonbuild/compilers/objc.py b/meson/mesonbuild/compilers/objc.py
new file mode 100644
index 000000000..7afa44f41
--- /dev/null
+++ b/meson/mesonbuild/compilers/objc.py
@@ -0,0 +1,108 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing as T
+
+from .. import coredata
+from ..mesonlib import MachineChoice, OptionKey
+
+from .compilers import Compiler
+from .mixins.clike import CLikeCompiler
+from .mixins.gnu import GnuCompiler
+from .mixins.clang import ClangCompiler
+
+if T.TYPE_CHECKING:
+ from ..programs import ExternalProgram
+ from ..envconfig import MachineInfo
+ from ..environment import Environment
+ from ..linkers import DynamicLinker
+
+
+class ObjCCompiler(CLikeCompiler, Compiler):
+
+ language = 'objc'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ exe_wrap: T.Optional['ExternalProgram'],
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ Compiler.__init__(self, exelist, version, for_machine, info,
+ is_cross=is_cross, full_version=full_version,
+ linker=linker)
+ CLikeCompiler.__init__(self, exe_wrap)
+
+ @staticmethod
+ def get_display_language() -> str:
+ return 'Objective-C'
+
+ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ code = '#import<stddef.h>\nint main(void) { return 0; }\n'
+ return self._sanity_check_impl(work_dir, environment, 'sanitycheckobjc.m', code)
+
+
+class GnuObjCCompiler(GnuCompiler, ObjCCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ ObjCCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ GnuCompiler.__init__(self, defines)
+ default_warn_args = ['-Wall', '-Winvalid-pch']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+
+class ClangObjCCompiler(ClangCompiler, ObjCCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ ObjCCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ ClangCompiler.__init__(self, defines)
+ default_warn_args = ['-Wall', '-Winvalid-pch']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+ def get_options(self) -> 'coredata.KeyedOptionDictType':
+ opts = super().get_options()
+ opts.update({
+ OptionKey('std', machine=self.for_machine, lang='c'): coredata.UserComboOption(
+ 'C language standard to use',
+ ['none', 'c89', 'c99', 'c11', 'c17', 'gnu89', 'gnu99', 'gnu11', 'gnu17'],
+ 'none',
+ )
+ })
+ return opts
+
+ def get_option_compile_args(self, options: 'coredata.KeyedOptionDictType') -> T.List[str]:
+ args = []
+ std = options[OptionKey('std', machine=self.for_machine, lang='c')]
+ if std.value != 'none':
+ args.append('-std=' + std.value)
+ return args
+
+class AppleClangObjCCompiler(ClangObjCCompiler):
+
+ """Handle the differences between Apple's clang and vanilla clang."""
diff --git a/meson/mesonbuild/compilers/objcpp.py b/meson/mesonbuild/compilers/objcpp.py
new file mode 100644
index 000000000..63036557d
--- /dev/null
+++ b/meson/mesonbuild/compilers/objcpp.py
@@ -0,0 +1,110 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing as T
+
+from .. import coredata
+from ..mesonlib import MachineChoice, OptionKey
+
+from .mixins.clike import CLikeCompiler
+from .compilers import Compiler
+from .mixins.gnu import GnuCompiler
+from .mixins.clang import ClangCompiler
+
+if T.TYPE_CHECKING:
+ from ..programs import ExternalProgram
+ from ..envconfig import MachineInfo
+ from ..environment import Environment
+ from ..linkers import DynamicLinker
+
+class ObjCPPCompiler(CLikeCompiler, Compiler):
+
+ language = 'objcpp'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ exe_wrap: T.Optional['ExternalProgram'],
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ Compiler.__init__(self, exelist, version, for_machine, info,
+ is_cross=is_cross, full_version=full_version,
+ linker=linker)
+ CLikeCompiler.__init__(self, exe_wrap)
+
+ @staticmethod
+ def get_display_language() -> str:
+ return 'Objective-C++'
+
+ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ code = '#import<stdio.h>\nclass MyClass;int main(void) { return 0; }\n'
+ return self._sanity_check_impl(work_dir, environment, 'sanitycheckobjcpp.mm', code)
+
+
+class GnuObjCPPCompiler(GnuCompiler, ObjCPPCompiler):
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ ObjCPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ GnuCompiler.__init__(self, defines)
+ default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+
+class ClangObjCPPCompiler(ClangCompiler, ObjCPPCompiler):
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ defines: T.Optional[T.Dict[str, str]] = None,
+ linker: T.Optional['DynamicLinker'] = None,
+ full_version: T.Optional[str] = None):
+ ObjCPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
+ info, exe_wrapper, linker=linker, full_version=full_version)
+ ClangCompiler.__init__(self, defines)
+ default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Wextra'],
+ '3': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+
+ def get_options(self) -> 'coredata.KeyedOptionDictType':
+ opts = super().get_options()
+ opts.update({
+ OptionKey('std', machine=self.for_machine, lang='cpp'): coredata.UserComboOption(
+ 'C++ language standard to use',
+ ['none', 'c++98', 'c++11', 'c++14', 'c++17', 'gnu++98', 'gnu++11', 'gnu++14', 'gnu++17'],
+ 'none',
+ )
+ })
+ return opts
+
+ def get_option_compile_args(self, options: 'coredata.KeyedOptionDictType') -> T.List[str]:
+ args = []
+ std = options[OptionKey('std', machine=self.for_machine, lang='cpp')]
+ if std.value != 'none':
+ args.append('-std=' + std.value)
+ return args
+
+
+class AppleClangObjCPPCompiler(ClangObjCPPCompiler):
+
+ """Handle the differences between Apple's clang and vanilla clang."""
diff --git a/meson/mesonbuild/compilers/rust.py b/meson/mesonbuild/compilers/rust.py
new file mode 100644
index 000000000..2b566c8b9
--- /dev/null
+++ b/meson/mesonbuild/compilers/rust.py
@@ -0,0 +1,170 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess, os.path
+import textwrap
+import typing as T
+
+from .. import coredata
+from ..mesonlib import (
+ EnvironmentException, MachineChoice, MesonException, Popen_safe,
+ OptionKey,
+)
+from .compilers import Compiler, rust_buildtype_args, clike_debug_args
+
+if T.TYPE_CHECKING:
+ from ..coredata import KeyedOptionDictType
+ from ..envconfig import MachineInfo
+ from ..environment import Environment # noqa: F401
+ from ..linkers import DynamicLinker
+ from ..programs import ExternalProgram
+
+
+rust_optimization_args = {
+ '0': [],
+ 'g': ['-C', 'opt-level=0'],
+ '1': ['-C', 'opt-level=1'],
+ '2': ['-C', 'opt-level=2'],
+ '3': ['-C', 'opt-level=3'],
+ 's': ['-C', 'opt-level=s'],
+} # type: T.Dict[str, T.List[str]]
+
+class RustCompiler(Compiler):
+
+ # rustc doesn't invoke the compiler itself, so it doesn't need a LINKER_PREFIX
+ language = 'rust'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ exe_wrapper: T.Optional['ExternalProgram'] = None,
+ full_version: T.Optional[str] = None,
+ linker: T.Optional['DynamicLinker'] = None):
+ super().__init__(exelist, version, for_machine, info,
+ is_cross=is_cross, full_version=full_version,
+ linker=linker)
+ self.exe_wrapper = exe_wrapper
+ self.id = 'rustc'
+ self.base_options.add(OptionKey('b_colorout'))
+ if 'link' in self.linker.id:
+ self.base_options.add(OptionKey('b_vscrt'))
+
+ def needs_static_linker(self) -> bool:
+ return False
+
+ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ source_name = os.path.join(work_dir, 'sanity.rs')
+ output_name = os.path.join(work_dir, 'rusttest')
+ with open(source_name, 'w', encoding='utf-8') as ofile:
+ ofile.write(textwrap.dedent(
+ '''fn main() {
+ }
+ '''))
+ pc = subprocess.Popen(self.exelist + ['-o', output_name, source_name],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=work_dir)
+ _stdo, _stde = pc.communicate()
+ assert isinstance(_stdo, bytes)
+ assert isinstance(_stde, bytes)
+ stdo = _stdo.decode('utf-8', errors='replace')
+ stde = _stde.decode('utf-8', errors='replace')
+ if pc.returncode != 0:
+ raise EnvironmentException('Rust compiler {} can not compile programs.\n{}\n{}'.format(
+ self.name_string(),
+ stdo,
+ stde))
+ if self.is_cross:
+ if self.exe_wrapper is None:
+ # Can't check if the binaries run so we have to assume they do
+ return
+ cmdlist = self.exe_wrapper.get_command() + [output_name]
+ else:
+ cmdlist = [output_name]
+ pe = subprocess.Popen(cmdlist, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+ pe.wait()
+ if pe.returncode != 0:
+ raise EnvironmentException('Executables created by Rust compiler %s are not runnable.' % self.name_string())
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ return ['--dep-info', outfile]
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return rust_buildtype_args[buildtype]
+
+ def get_sysroot(self) -> str:
+ cmd = self.exelist + ['--print', 'sysroot']
+ p, stdo, stde = Popen_safe(cmd)
+ return stdo.split('\n')[0]
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return clike_debug_args[is_debug]
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return rust_optimization_args[optimization_level]
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+ build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-L':
+ for j in ['dependency', 'crate', 'native', 'framework', 'all']:
+ combined_len = len(j) + 3
+ if i[:combined_len] == f'-L{j}=':
+ parameter_list[idx] = i[:combined_len] + os.path.normpath(os.path.join(build_dir, i[combined_len:]))
+ break
+
+ return parameter_list
+
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ return ['-o', outputname]
+
+ @classmethod
+ def use_linker_args(cls, linker: str) -> T.List[str]:
+ return ['-C', f'linker={linker}']
+
+ # rustc dispatches to a gcc-like C compiler driver for dynamic linking,
+ # so use_linker_args above selects that driver via -C linker=.
+
+ def get_options(self) -> 'KeyedOptionDictType':
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ return {
+ key: coredata.UserComboOption(
+ 'Rust edition to use',
+ ['none', '2015', '2018'],
+ 'none',
+ ),
+ }
+
+ def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ args = []
+ key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ std = options[key]
+ if std.value != 'none':
+ args.append('--edition=' + std.value)
+ return args
+
+ def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+ # Rust handles this for us, we don't need to do anything
+ return []
+
+ def get_colorout_args(self, colortype: str) -> T.List[str]:
+ if colortype in {'always', 'never', 'auto'}:
+ return [f'--color={colortype}']
+ raise MesonException(f'Invalid color type for rust {colortype}')
+
+ def get_linker_always_args(self) -> T.List[str]:
+ args: T.List[str] = []
+ for a in super().get_linker_always_args():
+ args.extend(['-C', f'link-arg={a}'])
+ return args
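
A worked example of the -L handling in compute_parameters_with_absolute_paths above: rustc's -L flag takes an optional KIND= prefix, and only the path component is rebased onto the build directory (rebase is a hypothetical helper):

import os

def rebase(arg, build_dir):
    for kind in ('dependency', 'crate', 'native', 'framework', 'all'):
        prefix = f'-L{kind}='
        if arg.startswith(prefix):
            return prefix + os.path.normpath(os.path.join(build_dir, arg[len(prefix):]))
    return arg

print(rebase('-Ldependency=rlibs', '/tmp/b'))  # -Ldependency=/tmp/b/rlibs
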
diff --git a/meson/mesonbuild/compilers/swift.py b/meson/mesonbuild/compilers/swift.py
new file mode 100644
index 000000000..2d52e2182
--- /dev/null
+++ b/meson/mesonbuild/compilers/swift.py
@@ -0,0 +1,127 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess, os.path
+import typing as T
+
+from ..mesonlib import EnvironmentException, MachineChoice
+
+from .compilers import Compiler, swift_buildtype_args, clike_debug_args
+
+if T.TYPE_CHECKING:
+ from ..envconfig import MachineInfo
+ from ..environment import Environment
+ from ..linkers import DynamicLinker
+
+swift_optimization_args = {
+ '0': [],
+ 'g': [],
+ '1': ['-O'],
+ '2': ['-O'],
+ '3': ['-O'],
+ 's': ['-O'],
+} # type: T.Dict[str, T.List[str]]
+
+class SwiftCompiler(Compiler):
+
+ LINKER_PREFIX = ['-Xlinker']
+ language = 'swift'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo', full_version: T.Optional[str] = None,
+ linker: T.Optional['DynamicLinker'] = None):
+ super().__init__(exelist, version, for_machine, info,
+ is_cross=is_cross, full_version=full_version,
+ linker=linker)
+ self.version = version
+ self.id = 'llvm'
+
+ def needs_static_linker(self) -> bool:
+ return True
+
+ def get_werror_args(self) -> T.List[str]:
+ return ['--fatal-warnings']
+
+ def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+ return ['-emit-dependencies']
+
+ def depfile_for_object(self, objfile: str) -> str:
+ return os.path.splitext(objfile)[0] + '.' + self.get_depfile_suffix()
+
+ def get_depfile_suffix(self) -> str:
+ return 'd'
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return ['-o', target]
+
+ def get_header_import_args(self, headername: str) -> T.List[str]:
+ return ['-import-objc-header', headername]
+
+ def get_warn_args(self, level: str) -> T.List[str]:
+ return []
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return swift_buildtype_args[buildtype]
+
+ def get_std_exe_link_args(self) -> T.List[str]:
+ return ['-emit-executable']
+
+ def get_module_args(self, modname: str) -> T.List[str]:
+ return ['-module-name', modname]
+
+ def get_mod_gen_args(self) -> T.List[str]:
+ return ['-emit-module']
+
+ def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+ return ['-I' + path]
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return ['-c']
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+ build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
+
+ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ src = 'swifttest.swift'
+ source_name = os.path.join(work_dir, src)
+ output_name = os.path.join(work_dir, 'swifttest')
+ extra_flags: T.List[str] = []
+ extra_flags += environment.coredata.get_external_args(self.for_machine, self.language)
+ if self.is_cross:
+ extra_flags += self.get_compile_only_args()
+ else:
+ extra_flags += environment.coredata.get_external_link_args(self.for_machine, self.language)
+ with open(source_name, 'w', encoding='utf-8') as ofile:
+ ofile.write('''print("Swift compilation is working.")
+''')
+ pc = subprocess.Popen(self.exelist + extra_flags + ['-emit-executable', '-o', output_name, src], cwd=work_dir)
+ pc.wait()
+ if pc.returncode != 0:
+ raise EnvironmentException('Swift compiler %s can not compile programs.' % self.name_string())
+ if self.is_cross:
+ # Can't check if the binaries run so we have to assume they do
+ return
+ if subprocess.call(output_name) != 0:
+ raise EnvironmentException('Executables created by Swift compiler %s are not runnable.' % self.name_string())
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return clike_debug_args[is_debug]
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return swift_optimization_args[optimization_level]
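
For illustration, the flag helpers above combine like this when emitting a Swift module (the values come directly from the methods; the composition itself is a sketch):

def get_module_args(modname):
    return ['-module-name', modname]

def get_mod_gen_args():
    return ['-emit-module']

print(get_module_args('Foo') + get_mod_gen_args())
# ['-module-name', 'Foo', '-emit-module']
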
diff --git a/meson/mesonbuild/compilers/vala.py b/meson/mesonbuild/compilers/vala.py
new file mode 100644
index 000000000..b8144f6bc
--- /dev/null
+++ b/meson/mesonbuild/compilers/vala.py
@@ -0,0 +1,138 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+import typing as T
+
+from .. import mlog
+from ..mesonlib import EnvironmentException, MachineChoice, version_compare, OptionKey
+
+from .compilers import Compiler, LibType
+
+if T.TYPE_CHECKING:
+ from ..envconfig import MachineInfo
+ from ..environment import Environment
+
+class ValaCompiler(Compiler):
+
+ language = 'vala'
+
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo'):
+ super().__init__(exelist, version, for_machine, info, is_cross=is_cross)
+ self.version = version
+ self.id = 'valac'
+ self.base_options = {OptionKey('b_colorout')}
+
+ def needs_static_linker(self) -> bool:
+ return False # Because compiles into C.
+
+ def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+ return []
+
+ def get_debug_args(self, is_debug: bool) -> T.List[str]:
+ return ['--debug'] if is_debug else []
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return [] # Because compiles into C.
+
+ def get_compile_only_args(self) -> T.List[str]:
+ return [] # Because compiles into C.
+
+ def get_pic_args(self) -> T.List[str]:
+ return []
+
+ def get_pie_args(self) -> T.List[str]:
+ return []
+
+ def get_pie_link_args(self) -> T.List[str]:
+ return []
+
+ def get_always_args(self) -> T.List[str]:
+ return ['-C']
+
+ def get_warn_args(self, warning_level: str) -> T.List[str]:
+ return []
+
+ def get_no_warn_args(self) -> T.List[str]:
+ return ['--disable-warnings']
+
+ def get_werror_args(self) -> T.List[str]:
+ return ['--fatal-warnings']
+
+ def get_colorout_args(self, colortype: str) -> T.List[str]:
+ if version_compare(self.version, '>=0.37.1'):
+ return ['--color=' + colortype]
+ return []
+
+ def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+ build_dir: str) -> T.List[str]:
+ for idx, i in enumerate(parameter_list):
+ if i[:9] == '--girdir=':
+ parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
+ if i[:10] == '--vapidir=':
+ parameter_list[idx] = i[:10] + os.path.normpath(os.path.join(build_dir, i[10:]))
+ if i[:13] == '--includedir=':
+ parameter_list[idx] = i[:13] + os.path.normpath(os.path.join(build_dir, i[13:]))
+ if i[:14] == '--metadatadir=':
+ parameter_list[idx] = i[:14] + os.path.normpath(os.path.join(build_dir, i[14:]))
+
+ return parameter_list
+
+ def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+ code = 'class MesonSanityCheck : Object { }'
+ extra_flags: T.List[str] = []
+ extra_flags += environment.coredata.get_external_args(self.for_machine, self.language)
+ if self.is_cross:
+ extra_flags += self.get_compile_only_args()
+ else:
+ extra_flags += environment.coredata.get_external_link_args(self.for_machine, self.language)
+ with self.cached_compile(code, environment.coredata, extra_args=extra_flags, mode='compile') as p:
+ if p.returncode != 0:
+ msg = f'Vala compiler {self.name_string()!r} can not compile programs'
+ raise EnvironmentException(msg)
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ if buildtype in {'debug', 'debugoptimized', 'minsize'}:
+ return ['--debug']
+ return []
+
+ def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
+ libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]:
+ if extra_dirs and isinstance(extra_dirs, str):
+ extra_dirs = [extra_dirs]
+ # Valac always looks in the default vapi dir, so only search there if
+ # no extra dirs are specified.
+ if not extra_dirs:
+ code = 'class MesonFindLibrary : Object { }'
+ args: T.List[str] = []
+ args += env.coredata.get_external_args(self.for_machine, self.language)
+ vapi_args = ['--pkg', libname]
+ args += vapi_args
+ with self.cached_compile(code, env.coredata, extra_args=args, mode='compile') as p:
+ if p.returncode == 0:
+ return vapi_args
+ # Not found? Try to find the vapi file itself.
+ for d in extra_dirs:
+ vapi = os.path.join(d, libname + '.vapi')
+ if os.path.isfile(vapi):
+ return [vapi]
+ mlog.debug(f'Searched {extra_dirs!r} but {libname!r} was not found')
+ return None
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ return []
+
+ def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+ return []
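
find_library above probes '--pkg <libname>' against valac's default vapi directory first, and only scans extra_dirs for a bare .vapi file as a fallback. A self-contained sketch of that fallback (find_vapi is a hypothetical name):

import os

def find_vapi(libname, extra_dirs):
    for d in extra_dirs:
        vapi = os.path.join(d, libname + '.vapi')
        if os.path.isfile(vapi):
            return [vapi]
    return None  # caller logs the failed search
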
diff --git a/meson/mesonbuild/coredata.py b/meson/mesonbuild/coredata.py
new file mode 100644
index 000000000..528ca9c66
--- /dev/null
+++ b/meson/mesonbuild/coredata.py
@@ -0,0 +1,1228 @@
+# Copyright 2012-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import mlog, mparser
+import pickle, os, uuid
+import sys
+from itertools import chain
+from pathlib import PurePath
+from collections import OrderedDict
+from .mesonlib import (
+ HoldableObject,
+ MesonException, EnvironmentException, MachineChoice, PerMachine,
+ PerMachineDefaultable, default_libdir, default_libexecdir,
+ default_prefix, split_args, OptionKey, OptionType, stringlistify,
+)
+from .wrap import WrapMode
+import ast
+import argparse
+import configparser
+import enum
+import shlex
+import typing as T
+
+if T.TYPE_CHECKING:
+ from . import dependencies
+ from .compilers.compilers import Compiler, CompileResult # noqa: F401
+ from .environment import Environment
+ from .mesonlib import OptionOverrideProxy
+ from .cmake.traceparser import CMakeCacheEntry
+
+ OptionDictType = T.Union[T.Dict[str, 'UserOption[T.Any]'], OptionOverrideProxy]
+ KeyedOptionDictType = T.Union[T.Dict['OptionKey', 'UserOption[T.Any]'], OptionOverrideProxy]
+ CompilerCheckCacheKey = T.Tuple[T.Tuple[str, ...], str, str, T.Tuple[str, ...], str]
+
+version = '0.59.3'
+backendlist = ['ninja', 'vs', 'vs2010', 'vs2012', 'vs2013', 'vs2015', 'vs2017', 'vs2019', 'xcode']
+
+default_yielding = False
+
+# Can't bind this near the class method it seems, sadly.
+_T = T.TypeVar('_T')
+
+
+class MesonVersionMismatchException(MesonException):
+ '''Build directory generated with Meson version is incompatible with current version'''
+ def __init__(self, old_version: str, current_version: str) -> None:
+ super().__init__(f'Build directory has been generated with Meson version {old_version}, '
+ f'which is incompatible with the current version {current_version}.')
+ self.old_version = old_version
+ self.current_version = current_version
+
+
+class UserOption(T.Generic[_T], HoldableObject):
+ def __init__(self, description: str, choices: T.Optional[T.Union[str, T.List[_T]]], yielding: T.Optional[bool]):
+ super().__init__()
+ self.choices = choices
+ self.description = description
+ if yielding is None:
+ yielding = default_yielding
+ if not isinstance(yielding, bool):
+ raise MesonException('Value of "yielding" must be a boolean.')
+ self.yielding = yielding
+
+ def printable_value(self) -> T.Union[str, int, bool, T.List[T.Union[str, int, bool]]]:
+ assert isinstance(self.value, (str, int, bool, list))
+ return self.value
+
+ # Check that the input is a valid value and return the
+ # "cleaned" or "native" version. For example the Boolean
+ # option could take the string "true" and return True.
+ def validate_value(self, value: T.Any) -> _T:
+ raise RuntimeError('Derived option class did not override validate_value.')
+
+ def set_value(self, newvalue: T.Any) -> None:
+ self.value = self.validate_value(newvalue)
+
+class UserStringOption(UserOption[str]):
+ def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
+ super().__init__(description, None, yielding)
+ self.set_value(value)
+
+ def validate_value(self, value: T.Any) -> str:
+ if not isinstance(value, str):
+ raise MesonException('Value "%s" for string option is not a string.' % str(value))
+ return value
+
+class UserBooleanOption(UserOption[bool]):
+ def __init__(self, description: str, value, yielding: T.Optional[bool] = None) -> None:
+ super().__init__(description, [True, False], yielding)
+ self.set_value(value)
+
+ def __bool__(self) -> bool:
+ return self.value
+
+ def validate_value(self, value: T.Any) -> bool:
+ if isinstance(value, bool):
+ return value
+ if not isinstance(value, str):
+ raise MesonException(f'Value {value} cannot be converted to a boolean')
+ if value.lower() == 'true':
+ return True
+ if value.lower() == 'false':
+ return False
+ raise MesonException('Value %s is not boolean (true or false).' % value)
+
+class UserIntegerOption(UserOption[int]):
+ def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
+ min_value, max_value, default_value = value
+ self.min_value = min_value
+ self.max_value = max_value
+ c = []
+ if min_value is not None:
+ c.append('>=' + str(min_value))
+ if max_value is not None:
+ c.append('<=' + str(max_value))
+ choices = ', '.join(c)
+ super().__init__(description, choices, yielding)
+ self.set_value(default_value)
+
+ def validate_value(self, value: T.Any) -> int:
+ if isinstance(value, str):
+ value = self.toint(value)
+ if not isinstance(value, int):
+ raise MesonException('New value for integer option is not an integer.')
+ if self.min_value is not None and value < self.min_value:
+ raise MesonException('New value %d is less than minimum value %d.' % (value, self.min_value))
+ if self.max_value is not None and value > self.max_value:
+ raise MesonException('New value %d is more than maximum value %d.' % (value, self.max_value))
+ return value
+
+ def toint(self, valuestring: str) -> int:
+ try:
+ return int(valuestring)
+ except ValueError:
+ raise MesonException('Value string "%s" is not convertible to an integer.' % valuestring)
+
+class OctalInt(int):
+ # NinjaBackend.get_user_option_args uses str() to convert it to a command line option
+ # UserUmaskOption.toint() uses int(str, 8) to convert it to an integer
+ # So we need to use oct instead of dec here if we do not want values to be misinterpreted.
+ def __str__(self) -> str:
+ return oct(int(self))
+
+class UserUmaskOption(UserIntegerOption, UserOption[T.Union[str, OctalInt]]):
+ def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
+ super().__init__(description, (0, 0o777, value), yielding)
+ self.choices = ['preserve', '0000-0777']
+
+ def printable_value(self) -> str:
+ if self.value == 'preserve':
+ return self.value
+ return format(self.value, '04o')
+
+ def validate_value(self, value: T.Any) -> T.Union[str, OctalInt]:
+ if value is None or value == 'preserve':
+ return 'preserve'
+ return OctalInt(super().validate_value(value))
+
+ def toint(self, valuestring: T.Union[str, OctalInt]) -> int:
+ try:
+ return int(valuestring, 8)
+ except ValueError as e:
+ raise MesonException(f'Invalid mode: {e}')
+
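A worked example of the octal round-trip above: umask values are parsed base-8 by toint() and rendered back in octal by OctalInt.__str__, so command lines and summaries show the familiar form:

class OctalInt(int):  # copy of the class above, for standalone running
    def __str__(self):
        return oct(int(self))

v = OctalInt(int('027', 8))              # what UserUmaskOption.toint does
print(int(v), str(v), format(v, '04o'))  # 23 0o27 0027
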
+class UserComboOption(UserOption[str]):
+ def __init__(self, description: str, choices: T.List[str], value: T.Any, yielding: T.Optional[bool] = None):
+ super().__init__(description, choices, yielding)
+ if not isinstance(self.choices, list):
+ raise MesonException('Combo choices must be an array.')
+ for i in self.choices:
+ if not isinstance(i, str):
+ raise MesonException('Combo choice elements must be strings.')
+ self.set_value(value)
+
+ def validate_value(self, value: T.Any) -> str:
+ if value not in self.choices:
+ if isinstance(value, bool):
+ _type = 'boolean'
+ elif isinstance(value, (int, float)):
+ _type = 'number'
+ else:
+ _type = 'string'
+ optionsstring = ', '.join([f'"{item}"' for item in self.choices])
+ raise MesonException('Value "{}" (of type "{}") for combo option "{}" is not one of the choices.'
+ ' Possible choices are (as string): {}.'.format(
+ value, _type, self.description, optionsstring))
+ return value
+
+class UserArrayOption(UserOption[T.List[str]]):
+ def __init__(self, description: str, value: T.Union[str, T.List[str]], split_args: bool = False, user_input: bool = False, allow_dups: bool = False, **kwargs: T.Any) -> None:
+ super().__init__(description, kwargs.get('choices', []), yielding=kwargs.get('yielding', None))
+ self.split_args = split_args
+ self.allow_dups = allow_dups
+ self.value = self.validate_value(value, user_input=user_input)
+
+ def validate_value(self, value: T.Union[str, T.List[str]], user_input: bool = True) -> T.List[str]:
+ # User input is for options defined on the command line (via -D
+ # options). Users can put their input in as a comma separated
+ # string, but for defining options in meson_options.txt the format
+ # should match that of a combo
+ if not user_input and isinstance(value, str) and not value.startswith('['):
+ raise MesonException('Value does not define an array: ' + value)
+
+ if isinstance(value, str):
+ if value.startswith('['):
+ try:
+ newvalue = ast.literal_eval(value)
+ except ValueError:
+ raise MesonException(f'malformed option {value}')
+ elif value == '':
+ newvalue = []
+ else:
+ if self.split_args:
+ newvalue = split_args(value)
+ else:
+ newvalue = [v.strip() for v in value.split(',')]
+ elif isinstance(value, list):
+ newvalue = value
+ else:
+ raise MesonException(f'"{newvalue}" should be a string array, but it is not')
+
+ if not self.allow_dups and len(set(newvalue)) != len(newvalue):
+ msg = 'Duplicated values in an array option are deprecated. ' \
+ 'This will become a hard error in the future.'
+ mlog.deprecation(msg)
+ for i in newvalue:
+ if not isinstance(i, str):
+ raise MesonException(f'String array element "{i!s}" is not a string.')
+ if self.choices:
+ bad = [x for x in newvalue if x not in self.choices]
+ if bad:
+ raise MesonException('Options "{}" are not in allowed choices: "{}"'.format(
+ ', '.join(bad), ', '.join(self.choices)))
+ return newvalue
+
+ def extend_value(self, value: T.Union[str, T.List[str]]) -> None:
+ """Extend the value with an additional value."""
+ new = self.validate_value(value)
+ self.set_value(self.value + new)
+
+
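The three accepted spellings of an array value above, as a standalone illustration (parse_array is a hypothetical reduction of validate_value; it omits the split_args, duplicate and choices handling):

import ast

def parse_array(value, user_input=True):
    if isinstance(value, str):
        if value.startswith('['):
            return ast.literal_eval(value)  # "['a', 'b']" literal form
        if value == '':
            return []
        if user_input:
            return [v.strip() for v in value.split(',')]  # -Dopt=a,b form
        raise ValueError('Value does not define an array: ' + value)
    return list(value)

print(parse_array("['a', 'b']"))  # ['a', 'b']
print(parse_array('a, b'))        # ['a', 'b']
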
+class UserFeatureOption(UserComboOption):
+ static_choices = ['enabled', 'disabled', 'auto']
+
+ def __init__(self, description: str, value: T.Any, yielding: T.Optional[bool] = None):
+ super().__init__(description, self.static_choices, value, yielding)
+ self.name: T.Optional[str] = None # TODO: Refactor options to all store their name
+
+ def is_enabled(self) -> bool:
+ return self.value == 'enabled'
+
+ def is_disabled(self) -> bool:
+ return self.value == 'disabled'
+
+ def is_auto(self) -> bool:
+ return self.value == 'auto'
+
+if T.TYPE_CHECKING:
+ from .dependencies.detect import TV_DepIDEntry, TV_DepID
+
+
+class DependencyCacheType(enum.Enum):
+
+ OTHER = 0
+ PKG_CONFIG = 1
+ CMAKE = 2
+
+ @classmethod
+ def from_type(cls, dep: 'dependencies.Dependency') -> 'DependencyCacheType':
+ from . import dependencies
+ # As more types gain search overrides they'll need to be added here
+ if isinstance(dep, dependencies.PkgConfigDependency):
+ return cls.PKG_CONFIG
+ if isinstance(dep, dependencies.CMakeDependency):
+ return cls.CMAKE
+ return cls.OTHER
+
+
+class DependencySubCache:
+
+ def __init__(self, type_: DependencyCacheType):
+ self.types = [type_]
+ self.__cache: T.Dict[T.Tuple[str, ...], 'dependencies.Dependency'] = {}
+
+ def __getitem__(self, key: T.Tuple[str, ...]) -> 'dependencies.Dependency':
+ return self.__cache[key]
+
+ def __setitem__(self, key: T.Tuple[str, ...], value: 'dependencies.Dependency') -> None:
+ self.__cache[key] = value
+
+ def __contains__(self, key: T.Tuple[str, ...]) -> bool:
+ return key in self.__cache
+
+ def values(self) -> T.Iterable['dependencies.Dependency']:
+ return self.__cache.values()
+
+
+class DependencyCache:
+
+ """Class that stores a cache of dependencies.
+
+ This class is meant to encapsulate the fact that we need multiple keys to
+ successfully lookup by providing a simple get/put interface.
+ """
+
+ def __init__(self, builtins: 'KeyedOptionDictType', for_machine: MachineChoice):
+ self.__cache = OrderedDict() # type: T.MutableMapping[TV_DepID, DependencySubCache]
+ self.__builtins = builtins
+ self.__pkg_conf_key = OptionKey('pkg_config_path', machine=for_machine)
+ self.__cmake_key = OptionKey('cmake_prefix_path', machine=for_machine)
+
+ def __calculate_subkey(self, type_: DependencyCacheType) -> T.Tuple[str, ...]:
+ data: T.Dict[str, T.List[str]] = {
+ DependencyCacheType.PKG_CONFIG: stringlistify(self.__builtins[self.__pkg_conf_key].value),
+ DependencyCacheType.CMAKE: stringlistify(self.__builtins[self.__cmake_key].value),
+ DependencyCacheType.OTHER: [],
+ }
+ assert type_ in data, 'Someone forgot to update subkey calculations for a new type'
+ return tuple(data[type_])
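+
+ # e.g. with pkg_config_path=['/opt/pc'] the subkey for a pkg-config
+ # dependency is ('/opt/pc',); OTHER dependencies always get ().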
+
+ def __iter__(self) -> T.Iterator['TV_DepID']:
+ return self.keys()
+
+ def put(self, key: 'TV_DepID', dep: 'dependencies.Dependency') -> None:
+ t = DependencyCacheType.from_type(dep)
+ if key not in self.__cache:
+ self.__cache[key] = DependencySubCache(t)
+ subkey = self.__calculate_subkey(t)
+ self.__cache[key][subkey] = dep
+
+ def get(self, key: 'TV_DepID') -> T.Optional['dependencies.Dependency']:
+ """Get a value from the cache.
+
+ If there is no cache entry then None will be returned.
+ """
+ try:
+ val = self.__cache[key]
+ except KeyError:
+ return None
+
+ for t in val.types:
+ subkey = self.__calculate_subkey(t)
+ try:
+ return val[subkey]
+ except KeyError:
+ pass
+ return None
+
+ def values(self) -> T.Iterator['dependencies.Dependency']:
+ for c in self.__cache.values():
+ yield from c.values()
+
+ def keys(self) -> T.Iterator['TV_DepID']:
+ return iter(self.__cache.keys())
+
+ def items(self) -> T.Iterator[T.Tuple['TV_DepID', T.List['dependencies.Dependency']]]:
+ for k, v in self.__cache.items():
+ vs = []
+ for t in v.types:
+ subkey = self.__calculate_subkey(t)
+ if subkey in v:
+ vs.append(v[subkey])
+ yield k, vs
+
+ def clear(self) -> None:
+ self.__cache.clear()
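+
+# Illustrative behaviour (not from the original source): a dependency cached
+# while pkg_config_path=['/opt/pc'] is stored under the subkey ('/opt/pc',).
+# If a reconfigure changes pkg_config_path, get() computes a different subkey,
+# misses, and the dependency is detected again.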
+
+
+class CMakeStateCache:
+ """Class that stores internal CMake compiler states.
+
+ This cache is used to reduce the startup overhead of CMake by caching
+ all internal CMake compiler variables.
+ """
+
+ def __init__(self) -> None:
+ self.__cache: T.Dict[str, T.Dict[str, T.List[str]]] = {}
+ self.cmake_cache: T.Dict[str, 'CMakeCacheEntry'] = {}
+
+ def __iter__(self) -> T.Iterator[T.Tuple[str, T.Dict[str, T.List[str]]]]:
+ return iter(self.__cache.items())
+
+ def items(self) -> T.Iterator[T.Tuple[str, T.Dict[str, T.List[str]]]]:
+ return iter(self.__cache.items())
+
+ def update(self, language: str, variables: T.Dict[str, T.List[str]]):
+ if language not in self.__cache:
+ self.__cache[language] = {}
+ self.__cache[language].update(variables)
+
+ @property
+ def languages(self) -> T.Set[str]:
+ return set(self.__cache.keys())
+
+
+# Can't bind this near the class method it seems, sadly.
+_V = T.TypeVar('_V')
+
+# This class contains all data that must persist over multiple
+ # invocations of Meson. It is roughly the same thing as CMake's
+ # CMakeCache.
+
+class CoreData:
+
+ def __init__(self, options: argparse.Namespace, scratch_dir: str, meson_command: T.List[str]):
+ self.lang_guids = {
+ 'default': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
+ 'c': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
+ 'cpp': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
+ 'test': '3AC096D0-A1C2-E12C-1390-A8335801FDAB',
+ 'directory': '2150E333-8FDC-42A3-9474-1A3956D46DE8',
+ }
+ self.test_guid = str(uuid.uuid4()).upper()
+ self.regen_guid = str(uuid.uuid4()).upper()
+ self.install_guid = str(uuid.uuid4()).upper()
+ self.meson_command = meson_command
+ self.target_guids = {}
+ self.version = version
+ self.options: 'KeyedOptionDictType' = {}
+ self.cross_files = self.__load_config_files(options, scratch_dir, 'cross')
+ self.compilers = PerMachine(OrderedDict(), OrderedDict()) # type: PerMachine[T.Dict[str, Compiler]]
+
+ # Set of subprojects that have already been initialized once, this is
+ # required to be stored and reloaded with the coredata, as we don't
+ # want to overwrite options for such subprojects.
+ self.initialized_subprojects: T.Set[str] = set()
+
+ # For host == build configurations these caches should be the same.
+ self.deps: PerMachine[DependencyCache] = PerMachineDefaultable.default(
+ self.is_cross_build(),
+ DependencyCache(self.options, MachineChoice.BUILD),
+ DependencyCache(self.options, MachineChoice.HOST))
+
+ self.compiler_check_cache = OrderedDict() # type: T.Dict[CompilerCheckCacheKey, compiler.CompileResult]
+
+ # CMake cache
+ self.cmake_cache: PerMachine[CMakeStateCache] = PerMachine(CMakeStateCache(), CMakeStateCache())
+
+ # Only to print a warning if it changes between Meson invocations.
+ self.config_files = self.__load_config_files(options, scratch_dir, 'native')
+ self.builtin_options_libdir_cross_fixup()
+ self.init_builtins('')
+
+ @staticmethod
+ def __load_config_files(options: argparse.Namespace, scratch_dir: str, ftype: str) -> T.List[str]:
+ # Try to make the passed filenames absolute, because by the time the
+ # files are parsed later we will have chdir()'d.
+ if ftype == 'cross':
+ filenames = options.cross_file
+ else:
+ filenames = options.native_file
+
+ if not filenames:
+ return []
+
+ found_invalid = [] # type: T.List[str]
+ missing = [] # type: T.List[str]
+ real = [] # type: T.List[str]
+ for i, f in enumerate(filenames):
+ f = os.path.expanduser(os.path.expandvars(f))
+ if os.path.exists(f):
+ if os.path.isfile(f):
+ real.append(os.path.abspath(f))
+ continue
+ elif os.path.isdir(f):
+ found_invalid.append(os.path.abspath(f))
+ else:
+ # in this case we've been passed some kind of pipe, copy
+ # the contents of that file into the meson private (scratch)
+ # directory so that it can be re-read when wiping/reconfiguring
+ copy = os.path.join(scratch_dir, f'{uuid.uuid4()}.{ftype}.ini')
+ with open(f, encoding='utf-8') as rf:
+ with open(copy, 'w', encoding='utf-8') as wf:
+ wf.write(rf.read())
+ real.append(copy)
+
+ # Also replace the command line argument, as the pipe
+ # probably won't exist on reconfigure
+ filenames[i] = copy
+ continue
+ if sys.platform != 'win32':
+ paths = [
+ os.environ.get('XDG_DATA_HOME', os.path.expanduser('~/.local/share')),
+ ] + os.environ.get('XDG_DATA_DIRS', '/usr/local/share:/usr/share').split(':')
+ for path in paths:
+ path_to_try = os.path.join(path, 'meson', ftype, f)
+ if os.path.isfile(path_to_try):
+ real.append(path_to_try)
+ break
+ else:
+ missing.append(f)
+ else:
+ missing.append(f)
+
+ if missing:
+ if found_invalid:
+ mlog.log('Found invalid candidates for', ftype, 'file:', *found_invalid)
+ mlog.log('Could not find any valid candidate for', ftype, 'files:', *missing)
+ raise MesonException(f'Cannot find specified {ftype} file(s): {", ".join(missing)}')
+ return real
+
+ def builtin_options_libdir_cross_fixup(self):
+ # By default set libdir to "lib" when cross compiling since
+ # getting the "system default" is always wrong on multiarch
+ # platforms as it gets a value like lib/x86_64-linux-gnu.
+ if self.cross_files:
+ BUILTIN_OPTIONS[OptionKey('libdir')].default = 'lib'
+
+ def sanitize_prefix(self, prefix):
+ prefix = os.path.expanduser(prefix)
+ if not os.path.isabs(prefix):
+ raise MesonException(f'prefix value {prefix!r} must be an absolute path')
+ if prefix.endswith('/') or prefix.endswith('\\'):
+ # On Windows we need to preserve the trailing slash if the
+ # string is of type 'C:\' because 'C:' is not an absolute path.
+ if len(prefix) == 3 and prefix[1] == ':':
+ pass
+ # If prefix is a single character, preserve it since it is
+ # the root directory.
+ elif len(prefix) == 1:
+ pass
+ else:
+ prefix = prefix[:-1]
+ return prefix
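+
+ # Illustrative examples (not from the original source):
+ #   '/usr/local/' -> '/usr/local'  (trailing slash stripped)
+ #   'C:\'         -> 'C:\'         (kept: 'C:' alone is not absolute)
+ #   '/'           -> '/'           (kept: single-character root)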
+
+ def sanitize_dir_option_value(self, prefix: str, option: OptionKey, value: T.Any) -> T.Any:
+ '''
+ If the option is an installation directory option and the value is an
+ absolute path, check that it resides within prefix and return the value
+ as a path relative to the prefix.
+
+ This way everyone can do f.ex, get_option('libdir') and be sure to get
+ the library directory relative to prefix.
+
+ .as_posix() keeps the posix-like file separators Meson uses.
+ '''
+ try:
+ value = PurePath(value)
+ except TypeError:
+ return value
+ if option.name.endswith('dir') and value.is_absolute() and \
+ option not in BULITIN_DIR_NOPREFIX_OPTIONS:
+ # Value must be a subdir of the prefix
+ # commonpath will always return a path in the native format, so we
+ # must use pathlib.PurePath to do the same conversion before
+ # comparing.
+ msg = ('The value of the \'{!s}\' option is \'{!s}\' which must be a '
+ 'subdir of the prefix {!r}.\nNote that if you pass a '
+ 'relative path, it is assumed to be a subdir of prefix.')
+ # os.path.commonpath doesn't understand case-insensitive filesystems,
+ # but PurePath().relative_to() does.
+ try:
+ value = value.relative_to(prefix)
+ except ValueError:
+ raise MesonException(msg.format(option, value, prefix))
+ if '..' in str(value):
+ raise MesonException(msg.format(option, value, prefix))
+ return value.as_posix()
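+
+ # Illustrative examples (not from the original source), with prefix '/usr':
+ #   libdir='/usr/lib/x86_64' -> 'lib/x86_64' (made relative to the prefix)
+ #   libdir='lib'             -> 'lib'        (relative values pass through)
+ #   libdir='/opt/lib'        -> MesonException (not below the prefix)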
+
+ def init_builtins(self, subproject: str) -> None:
+ # Create builtin options with default values
+ for key, opt in BUILTIN_OPTIONS.items():
+ self.add_builtin_option(self.options, key.evolve(subproject=subproject), opt)
+ for for_machine in iter(MachineChoice):
+ for key, opt in BUILTIN_OPTIONS_PER_MACHINE.items():
+ self.add_builtin_option(self.options, key.evolve(subproject=subproject, machine=for_machine), opt)
+
+ @staticmethod
+ def add_builtin_option(opts_map: 'KeyedOptionDictType', key: OptionKey,
+ opt: 'BuiltinOption') -> None:
+ if key.subproject:
+ if opt.yielding:
+ # This option is global and not per-subproject
+ return
+ value = opts_map[key.as_root()].value
+ else:
+ value = None
+ opts_map[key] = opt.init_option(key, value, default_prefix())
+
+ def init_backend_options(self, backend_name: str) -> None:
+ if backend_name == 'ninja':
+ self.options[OptionKey('backend_max_links')] = UserIntegerOption(
+ 'Maximum number of linker processes to run or 0 for no '
+ 'limit',
+ (0, None, 0))
+ elif backend_name.startswith('vs'):
+ self.options[OptionKey('backend_startup_project')] = UserStringOption(
+ 'Default project to execute in Visual Studio',
+ '')
+
+ def get_option(self, key: OptionKey) -> T.Union[str, int, bool, WrapMode]:
+ try:
+ v = self.options[key].value
+ if key.name == 'wrap_mode':
+ return WrapMode[v]
+ return v
+ except KeyError:
+ pass
+
+ try:
+ v = self.options[key.as_root()]
+ if v.yielding:
+ if key.name == 'wrap_mode':
+ return WrapMode[v.value]
+ return v.value
+ except KeyError:
+ pass
+
+ raise MesonException(f'Tried to get unknown builtin option {str(key)}')
+
+ def set_option(self, key: OptionKey, value) -> None:
+ if key.is_builtin():
+ if key.name == 'prefix':
+ value = self.sanitize_prefix(value)
+ else:
+ prefix = self.options[OptionKey('prefix')].value
+ value = self.sanitize_dir_option_value(prefix, key, value)
+
+ try:
+ self.options[key].set_value(value)
+ except KeyError:
+ raise MesonException(f'Tried to set unknown builtin option {str(key)}')
+
+ if key.name == 'buildtype':
+ self._set_others_from_buildtype(value)
+ elif key.name in {'wrap_mode', 'force_fallback_for'}:
+ # We could have the system dependency cached for a dependency that
+ # is now forced to use a subproject fallback. We could probably do
+ # finer-grained cache invalidation, but it is better to be safe.
+ self.clear_deps_cache()
+
+ def clear_deps_cache(self):
+ self.deps.host.clear()
+ self.deps.build.clear()
+
+ def get_nondefault_buildtype_args(self):
+ result = []
+ value = self.options[OptionKey('buildtype')].value
+ if value == 'plain':
+ opt = '0'
+ debug = False
+ elif value == 'debug':
+ opt = '0'
+ debug = True
+ elif value == 'debugoptimized':
+ opt = '2'
+ debug = True
+ elif value == 'release':
+ opt = '3'
+ debug = False
+ elif value == 'minsize':
+ opt = 's'
+ debug = True
+ else:
+ assert value == 'custom'
+ return []
+ actual_opt = self.options[OptionKey('optimization')].value
+ actual_debug = self.options[OptionKey('debug')].value
+ if actual_opt != opt:
+ result.append(('optimization', actual_opt, opt))
+ if actual_debug != debug:
+ result.append(('debug', actual_debug, debug))
+ return result
+
+ def _set_others_from_buildtype(self, value: str) -> None:
+ if value == 'plain':
+ opt = '0'
+ debug = False
+ elif value == 'debug':
+ opt = '0'
+ debug = True
+ elif value == 'debugoptimized':
+ opt = '2'
+ debug = True
+ elif value == 'release':
+ opt = '3'
+ debug = False
+ elif value == 'minsize':
+ opt = 's'
+ debug = True
+ else:
+ assert value == 'custom'
+ return
+ self.options[OptionKey('optimization')].set_value(opt)
+ self.options[OptionKey('debug')].set_value(debug)
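+
+ # Buildtype -> (optimization, debug) mapping implemented above:
+ #   plain          -> '0', False
+ #   debug          -> '0', True
+ #   debugoptimized -> '2', True
+ #   release        -> '3', False
+ #   minsize        -> 's', True
+ #   custom         -> leaves both options untouched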
+
+ @staticmethod
+ def is_per_machine_option(optname: OptionKey) -> bool:
+ if optname.name in BUILTIN_OPTIONS_PER_MACHINE:
+ return True
+ return optname.lang is not None
+
+ def validate_option_value(self, option_name: OptionKey, override_value):
+ try:
+ opt = self.options[option_name]
+ except KeyError:
+ raise MesonException(f'Tried to validate unknown option {str(option_name)}')
+ try:
+ return opt.validate_value(override_value)
+ except MesonException as e:
+ raise type(e)(f'Validation failed for option {option_name}: {e}') \
+ .with_traceback(sys.exc_info()[2])
+
+ def get_external_args(self, for_machine: MachineChoice, lang: str) -> T.Union[str, T.List[str]]:
+ return self.options[OptionKey('args', machine=for_machine, lang=lang)].value
+
+ def get_external_link_args(self, for_machine: MachineChoice, lang: str) -> T.Union[str, T.List[str]]:
+ return self.options[OptionKey('link_args', machine=for_machine, lang=lang)].value
+
+ def update_project_options(self, options: 'KeyedOptionDictType') -> None:
+ for key, value in options.items():
+ if not key.is_project():
+ continue
+ if key not in self.options:
+ self.options[key] = value
+ continue
+
+ oldval = self.options[key]
+ if type(oldval) is not type(value):
+ self.options[key] = value
+ elif oldval.choices != value.choices:
+ # If the choices have changed, use the new value, but attempt
+ # to keep the old options. If they are not valid keep the new
+ # defaults but warn.
+ self.options[key] = value
+ try:
+ value.set_value(oldval.value)
+ except MesonException:
+ mlog.warning(f'Old value(s) of {key} are no longer valid, resetting to default ({value.value}).')
+
+ def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
+ if when_building_for == MachineChoice.BUILD:
+ return False
+ return len(self.cross_files) > 0
+
+ def copy_build_options_from_regular_ones(self) -> None:
+ assert not self.is_cross_build()
+ for k in BUILTIN_OPTIONS_PER_MACHINE:
+ o = self.options[k]
+ self.options[k.as_build()].set_value(o.value)
+ for bk, bv in self.options.items():
+ if bk.machine is MachineChoice.BUILD:
+ hk = bk.as_host()
+ try:
+ hv = self.options[hk]
+ bv.set_value(hv.value)
+ except KeyError:
+ continue
+
+ def set_options(self, options: T.Dict[OptionKey, T.Any], subproject: str = '', warn_unknown: bool = True) -> None:
+ if not self.is_cross_build():
+ options = {k: v for k, v in options.items() if k.machine is not MachineChoice.BUILD}
+ # Set prefix first because it's needed to sanitize other options
+ pfk = OptionKey('prefix')
+ if pfk in options:
+ prefix = self.sanitize_prefix(options[pfk])
+ self.options[OptionKey('prefix')].set_value(prefix)
+ for key in BULITIN_DIR_NOPREFIX_OPTIONS:
+ if key not in options:
+ self.options[key].set_value(BUILTIN_OPTIONS[key].prefixed_default(key, prefix))
+
+ unknown_options: T.List[OptionKey] = []
+ for k, v in options.items():
+ if k == pfk:
+ continue
+ elif k not in self.options:
+ unknown_options.append(k)
+ else:
+ self.set_option(k, v)
+ if unknown_options and warn_unknown:
+ unknown_options_str = ', '.join(sorted(str(s) for s in unknown_options))
+ sub = f'In subproject {subproject}: ' if subproject else ''
+ mlog.warning(f'{sub}Unknown options: "{unknown_options_str}"')
+ mlog.log('The value of new options can be set with:')
+ mlog.log(mlog.bold('meson setup <builddir> --reconfigure -Dnew_option=new_value ...'))
+ if not self.is_cross_build():
+ self.copy_build_options_from_regular_ones()
+
+ def set_default_options(self, default_options: T.MutableMapping[OptionKey, str], subproject: str, env: 'Environment') -> None:
+ # Preserve order: if env.options has 'buildtype' it must come after
+ # 'optimization' if it is in default_options.
+ options: T.MutableMapping[OptionKey, T.Any]
+ if not subproject:
+ options = OrderedDict(default_options)
+ options.update(env.options)
+ env.options = options
+
+ # Create a subset of options, keeping only project and builtin
+ # options for this subproject.
+ # Language and backend specific options will be set later when adding
+ # languages and setting the backend (builtin options must be set first
+ # to know which backend we'll use).
+ options = OrderedDict()
+
+ for k, v in chain(default_options.items(), env.options.items()):
+ # If this is a subproject, don't use other subproject options
+ if k.subproject and k.subproject != subproject:
+ continue
+ # If the option is a builtin and is yielding then it's not allowed per subproject.
+ #
+ # Always test this using the HOST machine, as many builtin options
+ # are not valid for the BUILD machine, but the yielding value does
+ # not differ between them even when they are valid for both.
+ if subproject and k.is_builtin() and self.options[k.evolve(subproject='', machine=MachineChoice.HOST)].yielding:
+ continue
+ # Skip base, compiler, and backend options, they are handled when
+ # adding languages and setting backend.
+ if k.type in {OptionType.COMPILER, OptionType.BACKEND, OptionType.BASE}:
+ continue
+ options[k] = v
+
+ self.set_options(options, subproject=subproject)
+
+ def add_compiler_options(self, options: 'KeyedOptionDictType', lang: str, for_machine: MachineChoice,
+ env: 'Environment') -> None:
+ for k, o in options.items():
+ value = env.options.get(k)
+ if value is not None:
+ o.set_value(value)
+ self.options.setdefault(k, o)
+
+ def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
+ for_machine: MachineChoice, env: 'Environment') -> None:
+ """Add global language arguments that are needed before compiler/linker detection."""
+ from .compilers import compilers
+ # These options are all new at this point, because the compiler is
+ # responsible for adding its own options, thus calling
+ # `self.options.update()` is perfectly safe.
+ self.options.update(compilers.get_global_options(lang, comp, for_machine, env))
+
+ def process_new_compiler(self, lang: str, comp: 'Compiler', env: 'Environment') -> None:
+ from . import compilers
+
+ self.compilers[comp.for_machine][lang] = comp
+ self.add_compiler_options(comp.get_options(), lang, comp.for_machine, env)
+
+ enabled_opts: T.List[OptionKey] = []
+ for key in comp.base_options:
+ if key in self.options:
+ continue
+ oobj = compilers.base_options[key]
+ if key in env.options:
+ oobj.set_value(env.options[key])
+ enabled_opts.append(key)
+ self.options[key] = oobj
+ self.emit_base_options_warnings(enabled_opts)
+
+ def emit_base_options_warnings(self, enabled_opts: T.List[OptionKey]) -> None:
+ if OptionKey('b_bitcode') in enabled_opts:
+ mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.', fatal=False)
+ mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.', fatal=False)
+
+class CmdLineFileParser(configparser.ConfigParser):
+ def __init__(self) -> None:
+ # We don't want ':' as key delimiter, otherwise it would break when
+ # storing subproject options like "subproject:option=value"
+ super().__init__(delimiters=['='], interpolation=None)
+
+ def optionxform(self, option: str) -> str:
+ # Don't call str.lower() on keys
+ return option
+
+class MachineFileParser():
+ def __init__(self, filenames: T.List[str]) -> None:
+ self.parser = CmdLineFileParser()
+ self.constants = {'True': True, 'False': False}
+ self.sections = {}
+
+ self.parser.read(filenames)
+
+ # Parse [constants] first so they can be used in other sections
+ if self.parser.has_section('constants'):
+ self.constants.update(self._parse_section('constants'))
+
+ for s in self.parser.sections():
+ if s == 'constants':
+ continue
+ self.sections[s] = self._parse_section(s)
+
+ def _parse_section(self, s):
+ self.scope = self.constants.copy()
+ section = {}
+ for entry, value in self.parser.items(s):
+ if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
+ raise EnvironmentException(f'Malformed variable name {entry!r} in machine file.')
+ # Windows paths...
+ value = value.replace('\\', '\\\\')
+ try:
+ ast = mparser.Parser(value, 'machinefile').parse()
+ res = self._evaluate_statement(ast.lines[0])
+ except MesonException:
+ raise EnvironmentException(f'Malformed value in machine file variable {entry!r}.')
+ except KeyError as e:
+ raise EnvironmentException(f'Undefined constant {e.args[0]!r} in machine file variable {entry!r}.')
+ section[entry] = res
+ self.scope[entry] = res
+ return section
+
+ def _evaluate_statement(self, node):
+ if isinstance(node, (mparser.StringNode)):
+ return node.value
+ elif isinstance(node, mparser.BooleanNode):
+ return node.value
+ elif isinstance(node, mparser.NumberNode):
+ return node.value
+ elif isinstance(node, mparser.ArrayNode):
+ return [self._evaluate_statement(arg) for arg in node.args.arguments]
+ elif isinstance(node, mparser.IdNode):
+ return self.scope[node.value]
+ elif isinstance(node, mparser.ArithmeticNode):
+ l = self._evaluate_statement(node.left)
+ r = self._evaluate_statement(node.right)
+ if node.operation == 'add':
+ if (isinstance(l, str) and isinstance(r, str)) or \
+ (isinstance(l, list) and isinstance(r, list)):
+ return l + r
+ elif node.operation == 'div':
+ if isinstance(l, str) and isinstance(r, str):
+ return os.path.join(l, r)
+ raise EnvironmentException('Unsupported node type')
+
+def parse_machine_files(filenames):
+ parser = MachineFileParser(filenames)
+ return parser.sections
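+
+# An illustrative (hypothetical) machine file accepted by MachineFileParser;
+# '/' joins path components and '+' concatenates strings or lists:
+#
+#   [constants]
+#   toolchain = '/opt/cross'
+#
+#   [binaries]
+#   c = toolchain / 'bin' / 'gcc'
+#
+#   [built-in options]
+#   c_args = ['-O2'] + ['-pipe']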
+
+def get_cmd_line_file(build_dir: str) -> str:
+ return os.path.join(build_dir, 'meson-private', 'cmd_line.txt')
+
+def read_cmd_line_file(build_dir: str, options: argparse.Namespace) -> None:
+ filename = get_cmd_line_file(build_dir)
+ if not os.path.isfile(filename):
+ return
+
+ config = CmdLineFileParser()
+ config.read(filename)
+
+ # Do a copy because config is not really a dict. options.cmd_line_options
+ # overrides values from the file.
+ d = {OptionKey.from_string(k): v for k, v in config['options'].items()}
+ d.update(options.cmd_line_options)
+ options.cmd_line_options = d
+
+ properties = config['properties']
+ if not options.cross_file:
+ options.cross_file = ast.literal_eval(properties.get('cross_file', '[]'))
+ if not options.native_file:
+ # This will be a string in the form: "['first', 'second', ...]", use
+ # literal_eval to get it into the list of strings.
+ options.native_file = ast.literal_eval(properties.get('native_file', '[]'))
+
+def write_cmd_line_file(build_dir: str, options: argparse.Namespace) -> None:
+ filename = get_cmd_line_file(build_dir)
+ config = CmdLineFileParser()
+
+ properties = OrderedDict()
+ if options.cross_file:
+ properties['cross_file'] = options.cross_file
+ if options.native_file:
+ properties['native_file'] = options.native_file
+
+ config['options'] = {str(k): str(v) for k, v in options.cmd_line_options.items()}
+ config['properties'] = properties
+ with open(filename, 'w', encoding='utf-8') as f:
+ config.write(f)
+
+def update_cmd_line_file(build_dir: str, options: argparse.Namespace):
+ filename = get_cmd_line_file(build_dir)
+ config = CmdLineFileParser()
+ config.read(filename)
+ config['options'].update({str(k): str(v) for k, v in options.cmd_line_options.items()})
+ with open(filename, 'w', encoding='utf-8') as f:
+ config.write(f)
+
+def get_cmd_line_options(build_dir: str, options: argparse.Namespace) -> str:
+ copy = argparse.Namespace(**vars(options))
+ read_cmd_line_file(build_dir, copy)
+ cmdline = ['-D{}={}'.format(str(k), v) for k, v in copy.cmd_line_options.items()]
+ if options.cross_file:
+ cmdline += [f'--cross-file {f}' for f in options.cross_file]
+ if options.native_file:
+ cmdline += [f'--native-file {f}' for f in options.native_file]
+ return ' '.join([shlex.quote(x) for x in cmdline])
+
+def major_versions_differ(v1: str, v2: str) -> bool:
+ return v1.split('.')[0:2] != v2.split('.')[0:2]
+
+def load(build_dir: str) -> CoreData:
+ filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
+ load_fail_msg = f'Coredata file {filename!r} is corrupted. Try with a fresh build tree.'
+ try:
+ with open(filename, 'rb') as f:
+ obj = pickle.load(f)
+ except (pickle.UnpicklingError, EOFError):
+ raise MesonException(load_fail_msg)
+ except (ModuleNotFoundError, AttributeError):
+ raise MesonException(
+ f"Coredata file {filename!r} references functions or classes that don't "
+ "exist. This probably means that it was generated with an old "
+ "version of meson.")
+ if not isinstance(obj, CoreData):
+ raise MesonException(load_fail_msg)
+ if major_versions_differ(obj.version, version):
+ raise MesonVersionMismatchException(obj.version, version)
+ return obj
+
+def save(obj: CoreData, build_dir: str) -> str:
+ filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
+ prev_filename = filename + '.prev'
+ tempfilename = filename + '~'
+ if major_versions_differ(obj.version, version):
+ raise MesonException('Fatal version mismatch corruption.')
+ if os.path.exists(filename):
+ import shutil
+ shutil.copyfile(filename, prev_filename)
+ with open(tempfilename, 'wb') as f:
+ pickle.dump(obj, f)
+ f.flush()
+ os.fsync(f.fileno())
+ os.replace(tempfilename, filename)
+ return filename
+
+
+def register_builtin_arguments(parser: argparse.ArgumentParser) -> None:
+ for n, b in BUILTIN_OPTIONS.items():
+ b.add_to_argparse(str(n), parser, '')
+ for n, b in BUILTIN_OPTIONS_PER_MACHINE.items():
+ b.add_to_argparse(str(n), parser, ' (just for host machine)')
+ b.add_to_argparse(str(n.as_build()), parser, ' (just for build machine)')
+ parser.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option",
+ help='Set the value of an option, can be used several times to set multiple options.')
+
+def create_options_dict(options: T.List[str], subproject: str = '') -> T.Dict[OptionKey, str]:
+ result: T.OrderedDict[OptionKey, str] = OrderedDict()
+ for o in options:
+ try:
+ (key, value) = o.split('=', 1)
+ except ValueError:
+ raise MesonException(f'Option {o!r} must have a value separated by equals sign.')
+ k = OptionKey.from_string(key)
+ if subproject:
+ k = k.evolve(subproject=subproject)
+ result[k] = value
+ return result
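+
+# e.g. create_options_dict(['cpp_std=c++17', 'mylib:werror=true']) yields
+# {OptionKey('cpp_std'): 'c++17', OptionKey('werror', subproject='mylib'):
+# 'true'} (illustrative values).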
+
+def parse_cmd_line_options(args: argparse.Namespace) -> None:
+ args.cmd_line_options = create_options_dict(args.projectoptions)
+
+ # Merge builtin options set with --option into the dict.
+ for key in chain(
+ BUILTIN_OPTIONS.keys(),
+ (k.as_build() for k in BUILTIN_OPTIONS_PER_MACHINE.keys()),
+ BUILTIN_OPTIONS_PER_MACHINE.keys(),
+ ):
+ name = str(key)
+ value = getattr(args, name, None)
+ if value is not None:
+ if key in args.cmd_line_options:
+ cmdline_name = BuiltinOption.argparse_name_to_arg(name)
+ raise MesonException(
+ f'Got argument {name} as both -D{name} and {cmdline_name}. Pick one.')
+ args.cmd_line_options[key] = value
+ delattr(args, name)
+
+
+_U = T.TypeVar('_U', bound=UserOption[_T])
+
+class BuiltinOption(T.Generic[_T, _U]):
+
+ """Class for a builtin option type.
+
+ There are some cases that are not fully supported yet.
+ """
+
+ def __init__(self, opt_type: T.Type[_U], description: str, default: T.Any, yielding: bool = True, *,
+ choices: T.Any = None):
+ self.opt_type = opt_type
+ self.description = description
+ self.default = default
+ self.choices = choices
+ self.yielding = yielding
+
+ def init_option(self, name: 'OptionKey', value: T.Optional[T.Any], prefix: str) -> _U:
+ """Create an instance of opt_type and return it."""
+ if value is None:
+ value = self.prefixed_default(name, prefix)
+ keywords = {'yielding': self.yielding, 'value': value}
+ if self.choices:
+ keywords['choices'] = self.choices
+ return self.opt_type(self.description, **keywords)
+
+ def _argparse_action(self) -> T.Optional[str]:
+ # If the type is a boolean, the presence of the argument in --foo form
+ # is to enable it. Disabling happens by using -Dfoo=false, which is
+ # parsed under `args.projectoptions` and does not hit this codepath.
+ if isinstance(self.default, bool):
+ return 'store_true'
+ return None
+
+ def _argparse_choices(self) -> T.Any:
+ if self.opt_type is UserBooleanOption:
+ return [True, False]
+ elif self.opt_type is UserFeatureOption:
+ return UserFeatureOption.static_choices
+ return self.choices
+
+ @staticmethod
+ def argparse_name_to_arg(name: str) -> str:
+ if name == 'warning_level':
+ return '--warnlevel'
+ else:
+ return '--' + name.replace('_', '-')
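+
+ # e.g. 'warning_level' -> '--warnlevel' and 'pkg_config_path' ->
+ # '--pkg-config-path' (illustrative).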
+
+ def prefixed_default(self, name: 'OptionKey', prefix: str = '') -> T.Any:
+ if self.opt_type in [UserComboOption, UserIntegerOption]:
+ return self.default
+ try:
+ return BULITIN_DIR_NOPREFIX_OPTIONS[name][prefix]
+ except KeyError:
+ pass
+ return self.default
+
+ def add_to_argparse(self, name: str, parser: argparse.ArgumentParser, help_suffix: str) -> None:
+ kwargs = OrderedDict()
+
+ c = self._argparse_choices()
+ b = self._argparse_action()
+ h = self.description
+ if not b:
+ h = '{} (default: {}).'.format(h.rstrip('.'), self.prefixed_default(name))
+ else:
+ kwargs['action'] = b
+ if c and not b:
+ kwargs['choices'] = c
+ kwargs['default'] = argparse.SUPPRESS
+ kwargs['dest'] = name
+
+ cmdline_name = self.argparse_name_to_arg(name)
+ parser.add_argument(cmdline_name, help=h + help_suffix, **kwargs)
+
+
+# Update `docs/markdown/Builtin-options.md` after changing the options below
+# Also update mesonlib._BUILTIN_NAMES. See the comment there for why this is required.
+BUILTIN_DIR_OPTIONS: 'KeyedOptionDictType' = OrderedDict([
+ (OptionKey('prefix'), BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())),
+ (OptionKey('bindir'), BuiltinOption(UserStringOption, 'Executable directory', 'bin')),
+ (OptionKey('datadir'), BuiltinOption(UserStringOption, 'Data file directory', 'share')),
+ (OptionKey('includedir'), BuiltinOption(UserStringOption, 'Header file directory', 'include')),
+ (OptionKey('infodir'), BuiltinOption(UserStringOption, 'Info page directory', 'share/info')),
+ (OptionKey('libdir'), BuiltinOption(UserStringOption, 'Library directory', default_libdir())),
+ (OptionKey('libexecdir'), BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())),
+ (OptionKey('localedir'), BuiltinOption(UserStringOption, 'Locale data directory', 'share/locale')),
+ (OptionKey('localstatedir'), BuiltinOption(UserStringOption, 'Localstate data directory', 'var')),
+ (OptionKey('mandir'), BuiltinOption(UserStringOption, 'Manual page directory', 'share/man')),
+ (OptionKey('sbindir'), BuiltinOption(UserStringOption, 'System executable directory', 'sbin')),
+ (OptionKey('sharedstatedir'), BuiltinOption(UserStringOption, 'Architecture-independent data directory', 'com')),
+ (OptionKey('sysconfdir'), BuiltinOption(UserStringOption, 'Sysconf data directory', 'etc')),
+])
+
+BUILTIN_CORE_OPTIONS: 'KeyedOptionDictType' = OrderedDict([
+ (OptionKey('auto_features'), BuiltinOption(UserFeatureOption, "Override value of all 'auto' features", 'auto')),
+ (OptionKey('backend'), BuiltinOption(UserComboOption, 'Backend to use', 'ninja', choices=backendlist)),
+ (OptionKey('buildtype'), BuiltinOption(UserComboOption, 'Build type to use', 'debug',
+ choices=['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom'])),
+ (OptionKey('debug'), BuiltinOption(UserBooleanOption, 'Debug', True)),
+ (OptionKey('default_library'), BuiltinOption(UserComboOption, 'Default library type', 'shared', choices=['shared', 'static', 'both'],
+ yielding=False)),
+ (OptionKey('errorlogs'), BuiltinOption(UserBooleanOption, "Whether to print the logs from failing tests", True)),
+ (OptionKey('install_umask'), BuiltinOption(UserUmaskOption, 'Default umask to apply on permissions of installed files', '022')),
+ (OptionKey('layout'), BuiltinOption(UserComboOption, 'Build directory layout', 'mirror', choices=['mirror', 'flat'])),
+ (OptionKey('optimization'), BuiltinOption(UserComboOption, 'Optimization level', '0', choices=['0', 'g', '1', '2', '3', 's'])),
+ (OptionKey('stdsplit'), BuiltinOption(UserBooleanOption, 'Split stdout and stderr in test logs', True)),
+ (OptionKey('strip'), BuiltinOption(UserBooleanOption, 'Strip targets on install', False)),
+ (OptionKey('unity'), BuiltinOption(UserComboOption, 'Unity build', 'off', choices=['on', 'off', 'subprojects'])),
+ (OptionKey('unity_size'), BuiltinOption(UserIntegerOption, 'Unity block size', (2, None, 4))),
+ (OptionKey('warning_level'), BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'], yielding=False)),
+ (OptionKey('werror'), BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)),
+ (OptionKey('wrap_mode'), BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback', 'nopromote'])),
+ (OptionKey('force_fallback_for'), BuiltinOption(UserArrayOption, 'Force fallback for the specified subprojects', [])),
+])
+
+BUILTIN_OPTIONS = OrderedDict(chain(BUILTIN_DIR_OPTIONS.items(), BUILTIN_CORE_OPTIONS.items()))
+
+BUILTIN_OPTIONS_PER_MACHINE: 'KeyedOptionDictType' = OrderedDict([
+ (OptionKey('pkg_config_path'), BuiltinOption(UserArrayOption, 'List of additional paths for pkg-config to search', [])),
+ (OptionKey('cmake_prefix_path'), BuiltinOption(UserArrayOption, 'List of additional prefixes for cmake to search', [])),
+])
+
+# Special prefix-dependent defaults for installation directories that reside in
+# a path outside of the prefix in FHS and common usage.
+BULITIN_DIR_NOPREFIX_OPTIONS: T.Dict[OptionKey, T.Dict[str, str]] = {
+ OptionKey('sysconfdir'): {'/usr': '/etc'},
+ OptionKey('localstatedir'): {'/usr': '/var', '/usr/local': '/var/local'},
+ OptionKey('sharedstatedir'): {'/usr': '/var/lib', '/usr/local': '/var/local/lib'},
+}
+
+FORBIDDEN_TARGET_NAMES = {'clean': None,
+ 'clean-ctlist': None,
+ 'clean-gcno': None,
+ 'clean-gcda': None,
+ 'coverage': None,
+ 'coverage-text': None,
+ 'coverage-xml': None,
+ 'coverage-html': None,
+ 'phony': None,
+ 'PHONY': None,
+ 'all': None,
+ 'test': None,
+ 'benchmark': None,
+ 'install': None,
+ 'uninstall': None,
+ 'build.ninja': None,
+ 'scan-build': None,
+ 'reconfigure': None,
+ 'dist': None,
+ 'distcheck': None,
+ }
+
diff --git a/meson/mesonbuild/dependencies/__init__.py b/meson/mesonbuild/dependencies/__init__.py
new file mode 100644
index 000000000..bd90c90e9
--- /dev/null
+++ b/meson/mesonbuild/dependencies/__init__.py
@@ -0,0 +1,275 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .boost import BoostDependency
+from .cuda import CudaDependency
+from .hdf5 import hdf5_factory
+from .base import Dependency, InternalDependency, ExternalDependency, NotFoundDependency
+from .base import (
+ ExternalLibrary, DependencyException, DependencyMethods,
+ BuiltinDependency, SystemDependency)
+from .cmake import CMakeDependency
+from .configtool import ConfigToolDependency
+from .dub import DubDependency
+from .framework import ExtraFrameworkDependency
+from .pkgconfig import PkgConfigDependency
+from .factory import DependencyFactory
+from .detect import find_external_dependency, get_dep_identifier, packages, _packages_accept_language
+from .dev import (
+ ValgrindDependency, JDKSystemDependency, gmock_factory, gtest_factory,
+ llvm_factory, zlib_factory)
+from .coarrays import coarray_factory
+from .mpi import mpi_factory
+from .scalapack import scalapack_factory
+from .misc import (
+ BlocksDependency, OpenMPDependency, cups_factory, curses_factory, gpgme_factory,
+ libgcrypt_factory, libwmf_factory, netcdf_factory, pcap_factory, python3_factory,
+ shaderc_factory, threads_factory, ThreadDependency, intl_factory,
+)
+from .platform import AppleFrameworks
+from .qt import qt4_factory, qt5_factory, qt6_factory
+from .ui import GnuStepDependency, WxDependency, gl_factory, sdl2_factory, vulkan_factory
+
+__all__ = [
+ 'Dependency',
+ 'InternalDependency',
+ 'ExternalDependency',
+ 'SystemDependency',
+ 'BuiltinDependency',
+ 'NotFoundDependency',
+ 'ExternalLibrary',
+ 'DependencyException',
+ 'DependencyMethods',
+
+ 'CMakeDependency',
+ 'ConfigToolDependency',
+ 'DubDependency',
+ 'ExtraFrameworkDependency',
+ 'PkgConfigDependency',
+
+ 'DependencyFactory',
+
+ 'ThreadDependency',
+
+ 'find_external_dependency',
+ 'get_dep_identifier',
+]
+
+"""Dependency representations and discovery logic.
+
+Meson attempts to largely abstract away dependency discovery information, and
+to encapsulate that logic itself, so that the DSL does not need to expose
+discovery details directly. There are some cases where this is
+impossible/undesirable, such as the `get_variable()` method.
+
+Meson has four primary dependency types:
+ 1. pkg-config
+ 2. apple frameworks
+ 3. CMake
+ 4. system
+
+Plus a few more niche ones.
+
+When a user calls `dependency('foo')` Meson creates a list of candidates,
+and tries those candidates in order to find one that matches the criteria
+provided by the user (such as version requirements, or optional components
+that are required).
+
+Except to work around bugs or handle odd corner cases, pkg-config and CMake
+generally just work™, though there are exceptions. Most of this package is
+concerned with dependencies that don't (always) provide CMake and/or
+pkg-config files.
+
+For these cases one needs to write a `system` dependency. These dependencies
+descend directly from `ExternalDependency`; in their constructor they
+manually set up the necessary link and compile args (and additional
+dependencies as necessary).
+
+For example, imagine a dependency called Foo, it uses an environment variable
+called `$FOO_ROOT` to point to its install root, which looks like this:
+```txt
+$FOO_ROOT
+→ include/
+→ lib/
+```
+To use Foo, you need its include directory, and you need to link to
+`lib/libfoo.ext`.
+
+You could write code that looks like:
+
+```python
+class FooSystemDependency(ExternalDependency):
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ root = os.environ.get('FOO_ROOT')
+ if root is None:
+ mlog.debug('$FOO_ROOT is unset.')
+ self.is_found = False
+ return
+
+ lib = self.clib_compiler.find_library('foo', environment, [os.path.join(root, 'lib')])
+ if lib is None:
+ mlog.debug('Could not find lib.')
+ self.is_found = False
+ return
+
+ self.compile_args.append(f'-I{os.path.join(root, "include")}')
+ self.link_args.append(lib)
+ self.is_found = True
+```
+
+This code will look for `FOO_ROOT` in the environment, handle `FOO_ROOT` being
+undefined gracefully, and then set its `compile_args` and `link_args`.
+It will also gracefully handle not finding the required lib (hopefully that
+doesn't happen, but it could if, for example, the lib is only static and
+shared linking is requested).
+
+There are a couple of things about this that still aren't ideal. For one, we
+don't want to be reading random environment variables at this point. Those
+should actually be added to `envconfig.Properties` and read in
+`environment.Environment._set_default_properties_from_env` (see how
+`BOOST_ROOT` is handled). We can also handle the `static` keyword. So
+now that becomes:
+
+```python
+class FooSystemDependency(ExternalDependency):
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ root = environment.properties[self.for_machine].foo_root
+ if root is None:
+ mlog.debug('foo_root is unset.')
+ self.is_found = False
+ return
+
+ static = mesonlib.LibType.STATIC if kwargs.get('static', False) else mesonlib.LibType.SHARED
+ lib = self.clib_compiler.find_library(
+ 'foo', environment, [os.path.join(root, 'lib')], libtype=static)
+ if lib is None:
+ mlog.debug('Could not find lib.')
+ self.is_found = False
+ return
+
+ self.compile_args.append(f'-I{os.path.join(root, "include")}')
+ self.link_args.append(lib)
+ self.is_found = True
+```
+
+This is nicer in a couple of ways. First, we can properly cross compile, as
+we are allowed to set `foo_root` separately for the build and host machines.
+It also means that users can override this in their machine files, and that
+if the environment variable changes during a Meson reconfigure, Meson won't
+re-read it, which is important for reproducibility. Finally, Meson will
+figure out whether it should be finding `libfoo.so` or `libfoo.a` (or the
+platform specific names). Things are looking pretty good now, so it can be
+added to the `packages` dict below:
+
+```python
+packages.update({
+ 'foo': FooSystemDependency,
+})
+```
+
+Now, what if foo also provides pkg-config, but it's only shipped on Unices,
+or only included in very recent versions of the dependency? We can use the
+`DependencyFactory` class:
+
+```python
+foo_factory = DependencyFactory(
+ 'foo',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+ system_class=FooSystemDependency,
+)
+```
+
+This is a helper class that will generate a default pkg-config based
+dependency, and use the `FooSystemDependency` as well. It can also handle
+custom finders for pkg-config and cmake based dependencies that need some
+extra help. You would then add the `foo_factory` to packages instead of
+`FooSystemDependency`:
+
+```python
+packages.update({
+ 'foo': foo_factory,
+})
+```
+
+If you have a dependency that is very complicated (such as having multiple
+implementations), you may need to write your own factory function. There are a
+number of examples in this package.
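+
+A minimal sketch of such a factory (illustrative only; it reuses the
+`FooSystemDependency` from above, assumes a pkg-config file also named
+'foo', and ignores `for_machine` for brevity):
+
+```python
+import functools
+
+def foo_factory(env: 'Environment', for_machine: 'MachineChoice',
+                kwargs: T.Dict[str, T.Any]) -> T.List[T.Callable[[], 'ExternalDependency']]:
+    # Each candidate is a zero-argument callable; callers try them in
+    # order until one reports is_found.
+    return [
+        functools.partial(PkgConfigDependency, 'foo', env, kwargs),
+        functools.partial(FooSystemDependency, 'foo', env, kwargs),
+    ]
+```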
+
+_Note_: before we moved to factory functions it was common to use an
+`ExternalDependency` class that would instantiate different types of
+dependencies and hold the one it found. There are a number of drawbacks to
+this approach, and no new dependencies should do this.
+"""
+
+# This is a dict where the keys should be strings, and the values must be one
+# of:
+# - An ExternalDependency subclass
+# - A DependencyFactory object
+# - A callable with a signature of (Environment, MachineChoice, Dict[str, Any]) -> List[Callable[[], ExternalDependency]]
+packages.update({
+ # From dev:
+ 'gtest': gtest_factory,
+ 'gmock': gmock_factory,
+ 'llvm': llvm_factory,
+ 'valgrind': ValgrindDependency,
+ 'zlib': zlib_factory,
+ 'jdk': JDKSystemDependency,
+
+ 'boost': BoostDependency,
+ 'cuda': CudaDependency,
+
+ # per-file
+ 'coarray': coarray_factory,
+ 'hdf5': hdf5_factory,
+ 'mpi': mpi_factory,
+ 'scalapack': scalapack_factory,
+
+ # From misc:
+ 'blocks': BlocksDependency,
+ 'curses': curses_factory,
+ 'netcdf': netcdf_factory,
+ 'openmp': OpenMPDependency,
+ 'python3': python3_factory,
+ 'threads': threads_factory,
+ 'pcap': pcap_factory,
+ 'cups': cups_factory,
+ 'libwmf': libwmf_factory,
+ 'libgcrypt': libgcrypt_factory,
+ 'gpgme': gpgme_factory,
+ 'shaderc': shaderc_factory,
+ 'intl': intl_factory,
+
+ # From platform:
+ 'appleframeworks': AppleFrameworks,
+
+ # From ui:
+ 'gl': gl_factory,
+ 'gnustep': GnuStepDependency,
+ 'qt4': qt4_factory,
+ 'qt5': qt5_factory,
+ 'qt6': qt6_factory,
+ 'sdl2': sdl2_factory,
+ 'wxwidgets': WxDependency,
+ 'vulkan': vulkan_factory,
+})
+_packages_accept_language.update({
+ 'hdf5',
+ 'mpi',
+ 'netcdf',
+ 'openmp',
+})
diff --git a/meson/mesonbuild/dependencies/base.py b/meson/mesonbuild/dependencies/base.py
new file mode 100644
index 000000000..1882246bf
--- /dev/null
+++ b/meson/mesonbuild/dependencies/base.py
@@ -0,0 +1,573 @@
+# Copyright 2013-2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies.
+# Custom logic for several other packages are in separate files.
+import copy
+import os
+import itertools
+import typing as T
+from enum import Enum
+
+from .. import mlog
+from ..compilers import clib_langs
+from ..mesonlib import MachineChoice, MesonException, HoldableObject
+from ..mesonlib import version_compare_many
+from ..interpreterbase import FeatureDeprecated
+
+if T.TYPE_CHECKING:
+ from ..compilers.compilers import Compiler
+ from ..environment import Environment
+ from ..build import BuildTarget
+ from ..mesonlib import FileOrString
+
+
+class DependencyException(MesonException):
+ '''Exceptions raised while trying to find dependencies'''
+
+
+class DependencyMethods(Enum):
+ # Auto means to use whatever dependency-checking mechanisms, in whatever order, Meson thinks best.
+ AUTO = 'auto'
+ PKGCONFIG = 'pkg-config'
+ CMAKE = 'cmake'
+ # The dependency is provided by the standard library and does not need to be linked
+ BUILTIN = 'builtin'
+ # Just specify the standard link arguments, assuming the operating system provides the library.
+ SYSTEM = 'system'
+ # This is only supported on OSX - search the frameworks directory by name.
+ EXTRAFRAMEWORK = 'extraframework'
+ # Detect using the sysconfig module.
+ SYSCONFIG = 'sysconfig'
+ # Specify using a "program"-config style tool
+ CONFIG_TOOL = 'config-tool'
+ # For backwards compatibility
+ SDLCONFIG = 'sdlconfig'
+ CUPSCONFIG = 'cups-config'
+ PCAPCONFIG = 'pcap-config'
+ LIBWMFCONFIG = 'libwmf-config'
+ QMAKE = 'qmake'
+ # Misc
+ DUB = 'dub'
+
+
+DependencyTypeName = T.NewType('DependencyTypeName', str)
+
+
+class Dependency(HoldableObject):
+
+ @classmethod
+ def _process_include_type_kw(cls, kwargs: T.Dict[str, T.Any]) -> str:
+ if 'include_type' not in kwargs:
+ return 'preserve'
+ if not isinstance(kwargs['include_type'], str):
+ raise DependencyException('The include_type kwarg must be a string type')
+ if kwargs['include_type'] not in ['preserve', 'system', 'non-system']:
+ raise DependencyException("include_type may only be one of ['preserve', 'system', 'non-system']")
+ return kwargs['include_type']
+
+ def __init__(self, type_name: DependencyTypeName, kwargs: T.Dict[str, T.Any]) -> None:
+ self.name = "null"
+ self.version: T.Optional[str] = None
+ self.language: T.Optional[str] = None # None means C-like
+ self.is_found = False
+ self.type_name = type_name
+ self.compile_args: T.List[str] = []
+ self.link_args: T.List[str] = []
+ # Raw -L and -l arguments without manual library searching
+ # If None, self.link_args will be used
+ self.raw_link_args: T.Optional[T.List[str]] = None
+ self.sources: T.List['FileOrString'] = []
+ self.methods = process_method_kw(self.get_methods(), kwargs)
+ self.include_type = self._process_include_type_kw(kwargs)
+ self.ext_deps: T.List[Dependency] = []
+
+ def __repr__(self) -> str:
+ return f'<{self.__class__.__name__} {self.name}: {self.is_found}>'
+
+ def is_built(self) -> bool:
+ return False
+
+ def summary_value(self) -> T.Union[str, mlog.AnsiDecorator, mlog.AnsiText]:
+ if not self.found():
+ return mlog.red('NO')
+ if not self.version:
+ return mlog.green('YES')
+ return mlog.AnsiText(mlog.green('YES'), ' ', mlog.cyan(self.version))
+
+ def get_compile_args(self) -> T.List[str]:
+ if self.include_type == 'system':
+ converted = []
+ for i in self.compile_args:
+ if i.startswith('-I') or i.startswith('/I'):
+ converted += ['-isystem' + i[2:]]
+ else:
+ converted += [i]
+ return converted
+ if self.include_type == 'non-system':
+ converted = []
+ for i in self.compile_args:
+ if i.startswith('-isystem'):
+ converted += ['-I' + i[8:]]
+ else:
+ converted += [i]
+ return converted
+ return self.compile_args
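+
+ # e.g. with include_type='system', ['-I/opt/foo/include'] becomes
+ # ['-isystem/opt/foo/include']; 'non-system' performs the reverse
+ # conversion (illustrative).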
+
+ def get_all_compile_args(self) -> T.List[str]:
+ """Get the compile arguments from this dependency and it's sub dependencies."""
+ return list(itertools.chain(self.get_compile_args(),
+ *[d.get_all_compile_args() for d in self.ext_deps]))
+
+ def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
+ if raw and self.raw_link_args is not None:
+ return self.raw_link_args
+ return self.link_args
+
+ def get_all_link_args(self) -> T.List[str]:
+ """Get the link arguments from this dependency and it's sub dependencies."""
+ return list(itertools.chain(self.get_link_args(),
+ *[d.get_all_link_args() for d in self.ext_deps]))
+
+ def found(self) -> bool:
+ return self.is_found
+
+ def get_sources(self) -> T.List['FileOrString']:
+ """Source files that need to be added to the target.
+ As an example, gtest-all.cc when using GTest."""
+ return self.sources
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.AUTO]
+
+ def get_name(self) -> str:
+ return self.name
+
+ def get_version(self) -> str:
+ if self.version:
+ return self.version
+ else:
+ return 'unknown'
+
+ def get_include_type(self) -> str:
+ return self.include_type
+
+ def get_exe_args(self, compiler: 'Compiler') -> T.List[str]:
+ return []
+
+ def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Any]) -> str:
+ raise DependencyException(f'{self.name!r} is not a pkgconfig dependency')
+
+ def get_configtool_variable(self, variable_name: str) -> str:
+ raise DependencyException(f'{self.name!r} is not a config-tool dependency')
+
+ def get_partial_dependency(self, *, compile_args: bool = False,
+ link_args: bool = False, links: bool = False,
+ includes: bool = False, sources: bool = False) -> 'Dependency':
+ """Create a new dependency that contains part of the parent dependency.
+
+ The following options can be inherited:
+ links -- all link_with arguments
+ includes -- all include_directory and -I/-isystem calls
+ sources -- any source, header, or generated sources
+ compile_args -- any compile args
+ link_args -- any link args
+
+ Additionally, the new dependency will have the version parameter of its
+ parent (if any), and the requested values of any sub-dependencies will
+ be added as well.
+ """
+ raise RuntimeError('Unreachable code in partial_dependency called')
+
+ def _add_sub_dependency(self, deplist: T.Iterable[T.Callable[[], 'Dependency']]) -> bool:
+ """Add an internal depdency from a list of possible dependencies.
+
+ This method is intended to make it easier to add additional
+ dependencies to another dependency internally.
+
+ Returns true if the dependency was successfully added, false
+ otherwise.
+ """
+ for d in deplist:
+ dep = d()
+ if dep.is_found:
+ self.ext_deps.append(dep)
+ return True
+ return False
+
+ def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+ configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+ default_value: T.Optional[str] = None,
+ pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
+ if default_value is not None:
+ return default_value
+ raise DependencyException(f'No default provided for dependency {self!r}, which is not pkg-config, cmake, or config-tool based.')
+
+ def generate_system_dependency(self, include_type: str) -> 'Dependency':
+ new_dep = copy.deepcopy(self)
+ new_dep.include_type = self._process_include_type_kw({'include_type': include_type})
+ return new_dep
+
+class InternalDependency(Dependency):
+ def __init__(self, version: str, incdirs: T.List[str], compile_args: T.List[str],
+ link_args: T.List[str], libraries: T.List['BuildTarget'],
+ whole_libraries: T.List['BuildTarget'], sources: T.List['FileOrString'],
+ ext_deps: T.List[Dependency], variables: T.Dict[str, T.Any]):
+ super().__init__(DependencyTypeName('internal'), {})
+ self.version = version
+ self.is_found = True
+ self.include_directories = incdirs
+ self.compile_args = compile_args
+ self.link_args = link_args
+ self.libraries = libraries
+ self.whole_libraries = whole_libraries
+ self.sources = sources
+ self.ext_deps = ext_deps
+ self.variables = variables
+
+ def __deepcopy__(self, memo: T.Dict[int, 'InternalDependency']) -> 'InternalDependency':
+ result = self.__class__.__new__(self.__class__)
+ assert isinstance(result, InternalDependency)
+ memo[id(self)] = result
+ for k, v in self.__dict__.items():
+ if k in ['libraries', 'whole_libraries']:
+ setattr(result, k, copy.copy(v))
+ else:
+ setattr(result, k, copy.deepcopy(v, memo))
+ return result
+
+ def summary_value(self) -> mlog.AnsiDecorator:
+ # Omit the version. Most of the time it will be just the project
+ # version, which is uninteresting in the summary.
+ return mlog.green('YES')
+
+ def is_built(self) -> bool:
+ if self.sources or self.libraries or self.whole_libraries:
+ return True
+ return any(d.is_built() for d in self.ext_deps)
+
+ def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Any]) -> str:
+ raise DependencyException('Method "get_pkgconfig_variable()" is '
+ 'invalid for an internal dependency')
+
+ def get_configtool_variable(self, variable_name: str) -> str:
+ raise DependencyException('Method "get_configtool_variable()" is '
+ 'invalid for an internal dependency')
+
+ def get_partial_dependency(self, *, compile_args: bool = False,
+ link_args: bool = False, links: bool = False,
+ includes: bool = False, sources: bool = False) -> 'InternalDependency':
+ final_compile_args = self.compile_args.copy() if compile_args else []
+ final_link_args = self.link_args.copy() if link_args else []
+ final_libraries = self.libraries.copy() if links else []
+ final_whole_libraries = self.whole_libraries.copy() if links else []
+ final_sources = self.sources.copy() if sources else []
+ final_includes = self.include_directories.copy() if includes else []
+ final_deps = [d.get_partial_dependency(
+ compile_args=compile_args, link_args=link_args, links=links,
+ includes=includes, sources=sources) for d in self.ext_deps]
+ return InternalDependency(
+ self.version, final_includes, final_compile_args,
+ final_link_args, final_libraries, final_whole_libraries,
+ final_sources, final_deps, self.variables)
+
+ def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+ configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+ default_value: T.Optional[str] = None,
+ pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
+ val = self.variables.get(internal, default_value)
+ if val is not None:
+ # TODO: Try removing this assert by better typing self.variables
+ if isinstance(val, str):
+ return val
+ if isinstance(val, list):
+ for i in val:
+ assert isinstance(i, str)
+ return val
+ raise DependencyException(f'Could not get an internal variable and no default provided for {self!r}')
+
+ def generate_link_whole_dependency(self) -> Dependency:
+ new_dep = copy.deepcopy(self)
+ new_dep.whole_libraries += new_dep.libraries
+ new_dep.libraries = []
+ return new_dep
+
+class HasNativeKwarg:
+ def __init__(self, kwargs: T.Dict[str, T.Any]):
+ self.for_machine = self.get_for_machine_from_kwargs(kwargs)
+
+ def get_for_machine_from_kwargs(self, kwargs: T.Dict[str, T.Any]) -> MachineChoice:
+ return MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST
+
+class ExternalDependency(Dependency, HasNativeKwarg):
+ def __init__(self, type_name: DependencyTypeName, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
+ Dependency.__init__(self, type_name, kwargs)
+ self.env = environment
+ self.name = type_name # default
+ self.is_found = False
+ self.language = language
+ self.version_reqs = kwargs.get('version', None)
+ if isinstance(self.version_reqs, str):
+ self.version_reqs = [self.version_reqs]
+ self.required = kwargs.get('required', True)
+ self.silent = kwargs.get('silent', False)
+ self.static = kwargs.get('static', False)
+ if not isinstance(self.static, bool):
+ raise DependencyException('Static keyword must be boolean')
+ # Is this dependency to be run on the build platform?
+ HasNativeKwarg.__init__(self, kwargs)
+ self.clib_compiler = detect_compiler(self.name, environment, self.for_machine, self.language)
+
+ def get_compiler(self) -> 'Compiler':
+ return self.clib_compiler
+
+ def get_partial_dependency(self, *, compile_args: bool = False,
+ link_args: bool = False, links: bool = False,
+ includes: bool = False, sources: bool = False) -> Dependency:
+ new = copy.copy(self)
+ if not compile_args:
+ new.compile_args = []
+ if not link_args:
+ new.link_args = []
+        if not sources:
+            new.sources = []
+        if not includes:
+            pass # TODO maybe filter compile_args?
+
+        return new
+
+ def log_details(self) -> str:
+ return ''
+
+ def log_info(self) -> str:
+ return ''
+
+ def log_tried(self) -> str:
+ return ''
+
+ # Check if dependency version meets the requirements
+ def _check_version(self) -> None:
+ if not self.is_found:
+ return
+
+ if self.version_reqs:
+ # an unknown version can never satisfy any requirement
+ if not self.version:
+ self.is_found = False
+ found_msg: mlog.TV_LoggableList = []
+ found_msg += ['Dependency', mlog.bold(self.name), 'found:']
+ found_msg += [mlog.red('NO'), 'unknown version, but need:', self.version_reqs]
+ mlog.log(*found_msg)
+
+ if self.required:
+ m = f'Unknown version of dependency {self.name!r}, but need {self.version_reqs!r}.'
+ raise DependencyException(m)
+
+ else:
+ (self.is_found, not_found, found) = \
+ version_compare_many(self.version, self.version_reqs)
+ if not self.is_found:
+ found_msg = ['Dependency', mlog.bold(self.name), 'found:']
+ found_msg += [mlog.red('NO'),
+ 'found', mlog.normal_cyan(self.version), 'but need:',
+ mlog.bold(', '.join([f"'{e}'" for e in not_found]))]
+ if found:
+ found_msg += ['; matched:',
+ ', '.join([f"'{e}'" for e in found])]
+ mlog.log(*found_msg)
+
+ if self.required:
+ m = 'Invalid version of dependency, need {!r} {!r} found {!r}.'
+ raise DependencyException(m.format(self.name, not_found, self.version))
+ return
+
+
+class NotFoundDependency(Dependency):
+ def __init__(self, environment: 'Environment') -> None:
+ super().__init__(DependencyTypeName('not-found'), {})
+ self.env = environment
+ self.name = 'not-found'
+ self.is_found = False
+
+ def get_partial_dependency(self, *, compile_args: bool = False,
+ link_args: bool = False, links: bool = False,
+ includes: bool = False, sources: bool = False) -> 'NotFoundDependency':
+ return copy.copy(self)
+
+
+class ExternalLibrary(ExternalDependency):
+ def __init__(self, name: str, link_args: T.List[str], environment: 'Environment',
+ language: str, silent: bool = False) -> None:
+ super().__init__(DependencyTypeName('library'), environment, {}, language=language)
+ self.name = name
+ self.language = language
+ self.is_found = False
+ if link_args:
+ self.is_found = True
+ self.link_args = link_args
+ if not silent:
+ if self.is_found:
+ mlog.log('Library', mlog.bold(name), 'found:', mlog.green('YES'))
+ else:
+ mlog.log('Library', mlog.bold(name), 'found:', mlog.red('NO'))
+
+ def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
+ '''
+ External libraries detected using a compiler must only be used with
+ compatible code. For instance, Vala libraries (.vapi files) cannot be
+ used with C code, and not all Rust library types can be linked with
+ C-like code. Note that C++ libraries *can* be linked with C code with
+ a C++ linker (and vice-versa).
+ '''
+ # Using a vala library in a non-vala target, or a non-vala library in a vala target
+ # XXX: This should be extended to other non-C linkers such as Rust
+ if (self.language == 'vala' and language != 'vala') or \
+ (language == 'vala' and self.language != 'vala'):
+ return []
+ return super().get_link_args(language=language, raw=raw)
+
+ def get_partial_dependency(self, *, compile_args: bool = False,
+ link_args: bool = False, links: bool = False,
+ includes: bool = False, sources: bool = False) -> 'ExternalLibrary':
+ # External library only has link_args, so ignore the rest of the
+ # interface.
+ new = copy.copy(self)
+ if not link_args:
+ new.link_args = []
+ return new
+
+
+def sort_libpaths(libpaths: T.List[str], refpaths: T.List[str]) -> T.List[str]:
+ """Sort <libpaths> according to <refpaths>
+
+ It is intended to be used to sort -L flags returned by pkg-config.
+ Pkg-config returns flags in random order which cannot be relied on.
+ """
+ if len(refpaths) == 0:
+ return list(libpaths)
+
+ def key_func(libpath: str) -> T.Tuple[int, int]:
+ common_lengths: T.List[int] = []
+ for refpath in refpaths:
+ try:
+ common_path: str = os.path.commonpath([libpath, refpath])
+ except ValueError:
+ common_path = ''
+ common_lengths.append(len(common_path))
+ max_length = max(common_lengths)
+ max_index = common_lengths.index(max_length)
+ reversed_max_length = len(refpaths[max_index]) - max_length
+ return (max_index, reversed_max_length)
+ return sorted(libpaths, key=key_func)
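+
+# Illustrative example (hypothetical paths): each libpath is grouped under the
+# refpath it shares the longest common prefix with, preserving refpath order:
+#
+#   sort_libpaths(['/usr/local/lib', '/opt/foo/lib'], ['/opt/foo', '/usr/local'])
+#   # -> ['/opt/foo/lib', '/usr/local/lib']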
+
+def strip_system_libdirs(environment: 'Environment', for_machine: MachineChoice, link_args: T.List[str]) -> T.List[str]:
+ """Remove -L<system path> arguments.
+
+    Leaving these in will break builds where a user has one version of a
+    library in the system path and a different version elsewhere, and wants
+    to link against the non-system version.
+ """
+ exclude = {f'-L{p}' for p in environment.get_compiler_system_dirs(for_machine)}
+ return [l for l in link_args if l not in exclude]
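+
+# Illustrative example (hypothetical paths): given an Environment env and a
+# MachineChoice machine whose compiler system dirs are
+# ['/usr/lib', '/usr/local/lib'], then
+#   strip_system_libdirs(env, machine, ['-L/usr/lib', '-L/opt/boost/lib', '-lfoo'])
+# returns ['-L/opt/boost/lib', '-lfoo'].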
+
+def process_method_kw(possible: T.Iterable[DependencyMethods], kwargs: T.Dict[str, T.Any]) -> T.List[DependencyMethods]:
+ method = kwargs.get('method', 'auto') # type: T.Union[DependencyMethods, str]
+ if isinstance(method, DependencyMethods):
+ return [method]
+ # TODO: try/except?
+ if method not in [e.value for e in DependencyMethods]:
+ raise DependencyException(f'method {method!r} is invalid')
+ method = DependencyMethods(method)
+
+    # This maps per-tool config methods, which are deprecated, to the new
+    # generic CONFIG_TOOL value.
+ if method in [DependencyMethods.SDLCONFIG, DependencyMethods.CUPSCONFIG,
+ DependencyMethods.PCAPCONFIG, DependencyMethods.LIBWMFCONFIG]:
+ FeatureDeprecated.single_use(f'Configuration method {method.value}', '0.44', 'Use "config-tool" instead.')
+ method = DependencyMethods.CONFIG_TOOL
+ if method is DependencyMethods.QMAKE:
+        FeatureDeprecated.single_use('Configuration method "qmake"', '0.58', 'Use "config-tool" instead.')
+ method = DependencyMethods.CONFIG_TOOL
+
+ # Set the detection method. If the method is set to auto, use any available method.
+ # If method is set to a specific string, allow only that detection method.
+ if method == DependencyMethods.AUTO:
+ methods = list(possible)
+ elif method in possible:
+ methods = [method]
+ else:
+ raise DependencyException(
+ 'Unsupported detection method: {}, allowed methods are {}'.format(
+ method.value,
+ mlog.format_list([x.value for x in [DependencyMethods.AUTO] + list(possible)])))
+
+ return methods
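+
+# Illustrative example: with possible=[DependencyMethods.PKGCONFIG,
+# DependencyMethods.CMAKE] and kwargs={'method': 'auto'}, both methods are
+# returned in order; kwargs={'method': 'pkg-config'} yields only
+# [DependencyMethods.PKGCONFIG], and a string that is not a valid
+# DependencyMethods value raises DependencyException.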
+
+def detect_compiler(name: str, env: 'Environment', for_machine: MachineChoice,
+ language: T.Optional[str]) -> T.Optional['Compiler']:
+ """Given a language and environment find the compiler used."""
+ compilers = env.coredata.compilers[for_machine]
+
+ # Set the compiler for this dependency if a language is specified,
+ # else try to pick something that looks usable.
+ if language:
+ if language not in compilers:
+ m = name.capitalize() + ' requires a {0} compiler, but ' \
+ '{0} is not in the list of project languages'
+ raise DependencyException(m.format(language.capitalize()))
+ return compilers[language]
+ else:
+ for lang in clib_langs:
+ try:
+ return compilers[lang]
+ except KeyError:
+ continue
+ return None
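+
+# Illustrative example: detect_compiler('zlib', env, MachineChoice.HOST, None)
+# returns the first compiler found among the C-like project languages
+# (clib_langs), or None if none of them are enabled; requesting an explicit
+# language that is not a project language raises DependencyException.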
+
+
+class SystemDependency(ExternalDependency):
+
+ """Dependency base for System type dependencies."""
+
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+ language: T.Optional[str] = None) -> None:
+ super().__init__(DependencyTypeName('system'), env, kwargs, language=language)
+ self.name = name
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.SYSTEM]
+
+ def log_tried(self) -> str:
+ return 'system'
+
+
+class BuiltinDependency(ExternalDependency):
+
+ """Dependency base for Builtin type dependencies."""
+
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+ language: T.Optional[str] = None) -> None:
+ super().__init__(DependencyTypeName('builtin'), env, kwargs, language=language)
+ self.name = name
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.BUILTIN]
+
+ def log_tried(self) -> str:
+ return 'builtin'
diff --git a/meson/mesonbuild/dependencies/boost.py b/meson/mesonbuild/dependencies/boost.py
new file mode 100644
index 000000000..4e5af907e
--- /dev/null
+++ b/meson/mesonbuild/dependencies/boost.py
@@ -0,0 +1,1080 @@
+# Copyright 2013-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+import functools
+import typing as T
+from pathlib import Path
+
+from .. import mlog
+from .. import mesonlib
+from ..environment import Environment
+
+from .base import DependencyException, SystemDependency
+from .pkgconfig import PkgConfigDependency
+from .misc import threads_factory
+
+if T.TYPE_CHECKING:
+ from ..environment import Properties
+
+# On Windows, 3 directory layouts are supported:
+# * The default layout (versioned) installed:
+# - $BOOST_ROOT/include/boost-x_x/boost/*.hpp
+# - $BOOST_ROOT/lib/*.lib
+# * The non-default layout (system) installed:
+# - $BOOST_ROOT/include/boost/*.hpp
+# - $BOOST_ROOT/lib/*.lib
+# * The pre-built binaries from sf.net:
+# - $BOOST_ROOT/boost/*.hpp
+# - $BOOST_ROOT/lib<arch>-<compiler>/*.lib where arch=32/64 and compiler=msvc-14.1
+#
+# Note that we should also try to support:
+# mingw-w64 / Windows : libboost_<module>-mt.a (location = <prefix>/mingw64/lib/)
+# libboost_<module>-mt.dll.a
+#
+# The `modules` argument accepts library names. This is because every module that
+# has libraries to link against also has multiple options regarding how to
+# link. See for example:
+# * http://www.boost.org/doc/libs/1_65_1/libs/test/doc/html/boost_test/usage_variants.html
+# * http://www.boost.org/doc/libs/1_65_1/doc/html/stacktrace/configuration_and_build.html
+# * http://www.boost.org/doc/libs/1_65_1/libs/math/doc/html/math_toolkit/main_tr1.html
+
+# **On Unix**, official packaged versions of boost libraries follow these schemes:
+#
+# Linux / Debian: libboost_<module>.so -> libboost_<module>.so.1.66.0
+# Linux / Red Hat: libboost_<module>.so -> libboost_<module>.so.1.66.0
+# Linux / OpenSuse: libboost_<module>.so -> libboost_<module>.so.1.66.0
+# Win / Cygwin: libboost_<module>.dll.a (location = /usr/lib)
+# libboost_<module>.a
+# cygboost_<module>_1_64.dll (location = /usr/bin)
+# Win / VS: boost_<module>-vc<ver>-mt[-gd]-<arch>-1_67.dll (location = C:/local/boost_1_67_0)
+# Mac / homebrew: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /usr/local/lib)
+# Mac / macports: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /opt/local/lib)
+#
+# It's not clear that any other abi tags (e.g. -gd) are used in official packages.
+#
+# On Linux systems, boost libs have multithreading support enabled, but without the -mt tag.
+#
+# Boost documentation recommends using complex abi tags like "-lboost_regex-gcc34-mt-d-1_36".
+# (See http://www.boost.org/doc/libs/1_66_0/more/getting_started/unix-variants.html#library-naming)
+# However, it's not clear that any Unix distribution follows this scheme.
+# Furthermore, the boost documentation for Unix above uses examples from Windows like
+# "libboost_regex-vc71-mt-d-x86-1_34.lib", so apparently the abi tags may be more aimed at Windows.
+#
+# We use the following strategy for finding modules:
+# A) Detect potential boost root directories (uses also BOOST_ROOT env var)
+# B) Foreach candidate
+# 1. Look for the boost headers (boost/version.hpp)
+# 2. Find all boost libraries
+# 2.1 Add all libraries in lib*
+# 2.2 Filter out non boost libraries
+# 2.3 Filter the remaining libraries based on the meson requirements (static/shared, etc.)
+# 2.4 Ensure that all libraries have the same boost tag (and are thus compatible)
+# 3. Select the libraries matching the requested modules
+
+@functools.total_ordering
+class BoostIncludeDir():
+ def __init__(self, path: Path, version_int: int):
+ self.path = path
+ self.version_int = version_int
+ major = int(self.version_int / 100000)
+ minor = int((self.version_int / 100) % 1000)
+ patch = int(self.version_int % 100)
+ self.version = f'{major}.{minor}.{patch}'
+ self.version_lib = f'{major}_{minor}'
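+        # Worked example: version_int 107300 (the BOOST_VERSION of boost 1.73.0)
+        # decodes to major 1, minor 73, patch 0, i.e. version '1.73.0' and
+        # version_lib '1_73'.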
+
+ def __repr__(self) -> str:
+ return f'<BoostIncludeDir: {self.version} -- {self.path}>'
+
+ def __lt__(self, other: object) -> bool:
+ if isinstance(other, BoostIncludeDir):
+ return (self.version_int, self.path) < (other.version_int, other.path)
+ return NotImplemented
+
+@functools.total_ordering
+class BoostLibraryFile():
+ # Python libraries are special because of the included
+ # minor version in the module name.
+ boost_python_libs = ['boost_python', 'boost_numpy']
+ reg_python_mod_split = re.compile(r'(boost_[a-zA-Z]+)([0-9]*)')
+
+ reg_abi_tag = re.compile(r'^s?g?y?d?p?n?$')
+ reg_ver_tag = re.compile(r'^[0-9_]+$')
+
+ def __init__(self, path: Path):
+ self.path = path
+ self.name = self.path.name
+
+ # Initialize default properties
+ self.static = False
+ self.toolset = ''
+ self.arch = ''
+ self.version_lib = ''
+ self.mt = True
+
+ self.runtime_static = False
+ self.runtime_debug = False
+ self.python_debug = False
+ self.debug = False
+ self.stlport = False
+ self.deprecated_iostreams = False
+
+ # Post process the library name
+ name_parts = self.name.split('.')
+ self.basename = name_parts[0]
+ self.suffixes = name_parts[1:]
+ self.vers_raw = [x for x in self.suffixes if x.isdigit()]
+ self.suffixes = [x for x in self.suffixes if not x.isdigit()]
+ self.nvsuffix = '.'.join(self.suffixes) # Used for detecting the library type
+ self.nametags = self.basename.split('-')
+ self.mod_name = self.nametags[0]
+ if self.mod_name.startswith('lib'):
+ self.mod_name = self.mod_name[3:]
+
+ # Set library version if possible
+ if len(self.vers_raw) >= 2:
+ self.version_lib = '{}_{}'.format(self.vers_raw[0], self.vers_raw[1])
+
+ # Detecting library type
+ if self.nvsuffix in ['so', 'dll', 'dll.a', 'dll.lib', 'dylib']:
+ self.static = False
+ elif self.nvsuffix in ['a', 'lib']:
+ self.static = True
+ else:
+ raise DependencyException(f'Unable to process library extension "{self.nvsuffix}" ({self.path})')
+
+        # boost_*.lib is the dll import library (libboost_*.lib is the static one)
+ if self.basename.startswith('boost_') and self.nvsuffix == 'lib':
+ self.static = False
+
+ # Process tags
+ tags = self.nametags[1:]
+ # Filter out the python version tag and fix modname
+ if self.is_python_lib():
+ tags = self.fix_python_name(tags)
+ if not tags:
+ return
+
+        # Without any tags, mt is assumed; however, the absence of mt in a name
+        # that does have tags indicates that the lib was built without mt support.
+ self.mt = False
+ for i in tags:
+ if i == 'mt':
+ self.mt = True
+ elif len(i) == 3 and i[1:] in ['32', '64']:
+ self.arch = i
+ elif BoostLibraryFile.reg_abi_tag.match(i):
+ self.runtime_static = 's' in i
+ self.runtime_debug = 'g' in i
+ self.python_debug = 'y' in i
+ self.debug = 'd' in i
+ self.stlport = 'p' in i
+ self.deprecated_iostreams = 'n' in i
+ elif BoostLibraryFile.reg_ver_tag.match(i):
+ self.version_lib = i
+ else:
+ self.toolset = i
+
+ def __repr__(self) -> str:
+ return f'<LIB: {self.abitag} {self.mod_name:<32} {self.path}>'
+
+ def __lt__(self, other: object) -> bool:
+ if isinstance(other, BoostLibraryFile):
+ return (
+ self.mod_name, self.static, self.version_lib, self.arch,
+ not self.mt, not self.runtime_static,
+ not self.debug, self.runtime_debug, self.python_debug,
+ self.stlport, self.deprecated_iostreams,
+ self.name,
+ ) < (
+ other.mod_name, other.static, other.version_lib, other.arch,
+ not other.mt, not other.runtime_static,
+ not other.debug, other.runtime_debug, other.python_debug,
+ other.stlport, other.deprecated_iostreams,
+ other.name,
+ )
+ return NotImplemented
+
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, BoostLibraryFile):
+ return self.name == other.name
+ return NotImplemented
+
+ def __hash__(self) -> int:
+ return hash(self.name)
+
+ @property
+ def abitag(self) -> str:
+ abitag = ''
+ abitag += 'S' if self.static else '-'
+ abitag += 'M' if self.mt else '-'
+ abitag += ' '
+ abitag += 's' if self.runtime_static else '-'
+ abitag += 'g' if self.runtime_debug else '-'
+ abitag += 'y' if self.python_debug else '-'
+ abitag += 'd' if self.debug else '-'
+ abitag += 'p' if self.stlport else '-'
+ abitag += 'n' if self.deprecated_iostreams else '-'
+ abitag += ' ' + (self.arch or '???')
+ abitag += ' ' + (self.toolset or '?')
+ abitag += ' ' + (self.version_lib or 'x_xx')
+ return abitag
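+
+    # Illustrative abitag: a static, multithreaded library with no runtime
+    # flags, arch tag 'x64', unknown toolset and version 1_73 renders as
+    # 'SM ------ x64 ? 1_73'.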
+
+ def is_boost(self) -> bool:
+ return any([self.name.startswith(x) for x in ['libboost_', 'boost_']])
+
+ def is_python_lib(self) -> bool:
+ return any([self.mod_name.startswith(x) for x in BoostLibraryFile.boost_python_libs])
+
+ def fix_python_name(self, tags: T.List[str]) -> T.List[str]:
+        # Handle the boost_python naming madness.
+        # See https://github.com/mesonbuild/meson/issues/4788 for some distro
+        # specific naming variations.
+ other_tags = [] # type: T.List[str]
+
+ # Split the current modname into the base name and the version
+ m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
+ cur_name = m_cur.group(1)
+ cur_vers = m_cur.group(2)
+
+ # Update the current version string if the new version string is longer
+ def update_vers(new_vers: str) -> None:
+ nonlocal cur_vers
+ new_vers = new_vers.replace('_', '')
+ new_vers = new_vers.replace('.', '')
+ if not new_vers.isdigit():
+ return
+ if len(new_vers) > len(cur_vers):
+ cur_vers = new_vers
+
+ for i in tags:
+ if i.startswith('py'):
+ update_vers(i[2:])
+ elif i.isdigit():
+ update_vers(i)
+            elif len(i) >= 3 and i[0].isdigit() and i[2].isdigit() and i[1] == '.':
+ update_vers(i)
+ else:
+ other_tags += [i]
+
+ self.mod_name = cur_name + cur_vers
+ return other_tags
+
+ def mod_name_matches(self, mod_name: str) -> bool:
+ if self.mod_name == mod_name:
+ return True
+ if not self.is_python_lib():
+ return False
+
+ m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
+ m_arg = BoostLibraryFile.reg_python_mod_split.match(mod_name)
+
+ if not m_cur or not m_arg:
+ return False
+
+ if m_cur.group(1) != m_arg.group(1):
+ return False
+
+ cur_vers = m_cur.group(2)
+ arg_vers = m_arg.group(2)
+
+ # Always assume python 2 if nothing is specified
+ if not arg_vers:
+ arg_vers = '2'
+
+ return cur_vers.startswith(arg_vers)
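+
+    # Illustrative examples: a library with mod_name 'boost_python38' matches
+    # a requested 'boost_python3' (version digits are prefix-matched), while a
+    # bare 'boost_python' request is treated as python 2 and therefore does
+    # not match it.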
+
+ def version_matches(self, version_lib: str) -> bool:
+ # If no version tag is present, assume that it fits
+ if not self.version_lib or not version_lib:
+ return True
+ return self.version_lib == version_lib
+
+ def arch_matches(self, arch: str) -> bool:
+        # If no arch tag is present, assume that it fits
+ if not self.arch or not arch:
+ return True
+ return self.arch == arch
+
+ def vscrt_matches(self, vscrt: str) -> bool:
+        # If no vscrt tag is present, assume that it fits. Handled tags: ['/MD', '/MDd', '/MT', '/MTd']
+ if not vscrt:
+ return True
+ if vscrt in ['/MD', '-MD']:
+ return not self.runtime_static and not self.runtime_debug
+ elif vscrt in ['/MDd', '-MDd']:
+ return not self.runtime_static and self.runtime_debug
+ elif vscrt in ['/MT', '-MT']:
+ return (self.runtime_static or not self.static) and not self.runtime_debug
+ elif vscrt in ['/MTd', '-MTd']:
+ return (self.runtime_static or not self.static) and self.runtime_debug
+
+        mlog.warning(f'Boost: unknown vscrt tag {vscrt}. This may cause the compilation to fail. Please consider reporting this as a bug.', once=True)
+ return True
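+
+    # Illustrative mapping: '/MD' matches libs with neither a static nor a
+    # debug runtime tag; '/MTd' matches libs tagged with the debug runtime
+    # that are either static-runtime or shared.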
+
+ def get_compiler_args(self) -> T.List[str]:
+ args = [] # type: T.List[str]
+ if self.mod_name in boost_libraries:
+ libdef = boost_libraries[self.mod_name] # type: BoostLibrary
+ if self.static:
+ args += libdef.static
+ else:
+ args += libdef.shared
+ if self.mt:
+ args += libdef.multi
+ else:
+ args += libdef.single
+ return args
+
+ def get_link_args(self) -> T.List[str]:
+ return [self.path.as_posix()]
+
+class BoostDependency(SystemDependency):
+ def __init__(self, environment: Environment, kwargs: T.Dict[str, T.Any]) -> None:
+ super().__init__('boost', environment, kwargs, language='cpp')
+ buildtype = environment.coredata.get_option(mesonlib.OptionKey('buildtype'))
+ assert isinstance(buildtype, str)
+ self.debug = buildtype.startswith('debug')
+ self.multithreading = kwargs.get('threading', 'multi') == 'multi'
+
+ self.boost_root = None # type: T.Optional[Path]
+ self.explicit_static = 'static' in kwargs
+
+ # Extract and validate modules
+ self.modules = mesonlib.extract_as_list(kwargs, 'modules') # type: T.List[str]
+ for i in self.modules:
+ if not isinstance(i, str):
+ raise DependencyException('Boost module argument is not a string.')
+ if i.startswith('boost_'):
+ raise DependencyException('Boost modules must be passed without the boost_ prefix')
+
+ self.modules_found = [] # type: T.List[str]
+ self.modules_missing = [] # type: T.List[str]
+
+ # Do we need threads?
+ if 'thread' in self.modules:
+ if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+ self.is_found = False
+ return
+
+ # Try figuring out the architecture tag
+ self.arch = environment.machines[self.for_machine].cpu_family
+ self.arch = boost_arch_map.get(self.arch, None)
+
+ # First, look for paths specified in a machine file
+ props = self.env.properties[self.for_machine]
+        if any(x in props for x in
+               ['boost_includedir', 'boost_librarydir', 'boost_root']):
+ self.detect_boost_machine_file(props)
+ return
+
+ # Finally, look for paths from .pc files and from searching the filesystem
+ self.detect_roots()
+
+ def check_and_set_roots(self, roots: T.List[Path]) -> None:
+ roots = list(mesonlib.OrderedSet(roots))
+ for j in roots:
+ # 1. Look for the boost headers (boost/version.hpp)
+ mlog.debug(f'Checking potential boost root {j.as_posix()}')
+ inc_dirs = self.detect_inc_dirs(j)
+ inc_dirs = sorted(inc_dirs, reverse=True) # Prefer the newer versions
+
+ # Early abort when boost is not found
+ if not inc_dirs:
+ continue
+
+ lib_dirs = self.detect_lib_dirs(j)
+ self.is_found = self.run_check(inc_dirs, lib_dirs)
+ if self.is_found:
+ self.boost_root = j
+ break
+
+ def detect_boost_machine_file(self, props: 'Properties') -> None:
+ """Detect boost with values in the machine file or environment.
+
+ The machine file values are defaulted to the environment values.
+ """
+        # XXX: if we had a TypedDict we wouldn't need this
+ incdir = props.get('boost_includedir')
+ assert incdir is None or isinstance(incdir, str)
+ libdir = props.get('boost_librarydir')
+ assert libdir is None or isinstance(libdir, str)
+
+ if incdir and libdir:
+ inc_dir = Path(incdir)
+ lib_dir = Path(libdir)
+
+ if not inc_dir.is_absolute() or not lib_dir.is_absolute():
+ raise DependencyException('Paths given for boost_includedir and boost_librarydir in machine file must be absolute')
+
+ mlog.debug('Trying to find boost with:')
+ mlog.debug(f' - boost_includedir = {inc_dir}')
+ mlog.debug(f' - boost_librarydir = {lib_dir}')
+
+ return self.detect_split_root(inc_dir, lib_dir)
+
+ elif incdir or libdir:
+ raise DependencyException('Both boost_includedir *and* boost_librarydir have to be set in your machine file (one is not enough)')
+
+ rootdir = props.get('boost_root')
+ # It shouldn't be possible to get here without something in boost_root
+        assert rootdir
+
+ raw_paths = mesonlib.stringlistify(rootdir)
+ paths = [Path(x) for x in raw_paths]
+ if paths and any([not x.is_absolute() for x in paths]):
+ raise DependencyException('boost_root path given in machine file must be absolute')
+
+ self.check_and_set_roots(paths)
+
+ def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool:
+ mlog.debug(' - potential library dirs: {}'.format([x.as_posix() for x in lib_dirs]))
+ mlog.debug(' - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs]))
+
+ # 2. Find all boost libraries
+ libs = [] # type: T.List[BoostLibraryFile]
+ for i in lib_dirs:
+ libs = self.detect_libraries(i)
+ if libs:
+ mlog.debug(f' - found boost library dir: {i}')
+ # mlog.debug(' - raw library list:')
+ # for j in libs:
+ # mlog.debug(' - {}'.format(j))
+ break
+ libs = sorted(set(libs))
+
+ modules = ['boost_' + x for x in self.modules]
+ for inc in inc_dirs:
+ mlog.debug(f' - found boost {inc.version} include dir: {inc.path}')
+ f_libs = self.filter_libraries(libs, inc.version_lib)
+
+ mlog.debug(' - filtered library list:')
+ for j in f_libs:
+ mlog.debug(f' - {j}')
+
+ # 3. Select the libraries matching the requested modules
+ not_found = [] # type: T.List[str]
+ selected_modules = [] # type: T.List[BoostLibraryFile]
+ for mod in modules:
+ found = False
+ for l in f_libs:
+ if l.mod_name_matches(mod):
+ selected_modules += [l]
+ found = True
+ break
+ if not found:
+ not_found += [mod]
+
+ # log the result
+ mlog.debug(' - found:')
+ comp_args = [] # type: T.List[str]
+ link_args = [] # type: T.List[str]
+ for j in selected_modules:
+ c_args = j.get_compiler_args()
+ l_args = j.get_link_args()
+ mlog.debug(' - {:<24} link={} comp={}'.format(j.mod_name, str(l_args), str(c_args)))
+ comp_args += c_args
+ link_args += l_args
+
+ comp_args = list(set(comp_args))
+ link_args = list(set(link_args))
+
+ self.modules_found = [x.mod_name for x in selected_modules]
+ self.modules_found = [x[6:] for x in self.modules_found]
+ self.modules_found = sorted(set(self.modules_found))
+ self.modules_missing = not_found
+ self.modules_missing = [x[6:] for x in self.modules_missing]
+ self.modules_missing = sorted(set(self.modules_missing))
+
+ # if we found all modules we are done
+ if not not_found:
+ self.version = inc.version
+ self.compile_args = ['-I' + inc.path.as_posix()]
+ self.compile_args += comp_args
+ self.compile_args += self._extra_compile_args()
+ self.compile_args = list(mesonlib.OrderedSet(self.compile_args))
+ self.link_args = link_args
+ mlog.debug(f' - final compile args: {self.compile_args}')
+ mlog.debug(f' - final link args: {self.link_args}')
+ return True
+
+ # in case we missed something log it and try again
+ mlog.debug(' - NOT found:')
+ for mod in not_found:
+ mlog.debug(f' - {mod}')
+
+ return False
+
+ def detect_inc_dirs(self, root: Path) -> T.List[BoostIncludeDir]:
+ candidates = [] # type: T.List[Path]
+ inc_root = root / 'include'
+
+ candidates += [root / 'boost']
+ candidates += [inc_root / 'boost']
+ if inc_root.is_dir():
+ for i in inc_root.iterdir():
+ if not i.is_dir() or not i.name.startswith('boost-'):
+ continue
+ candidates += [i / 'boost']
+ candidates = [x for x in candidates if x.is_dir()]
+ candidates = [x / 'version.hpp' for x in candidates]
+ candidates = [x for x in candidates if x.exists()]
+ return [self._include_dir_from_version_header(x) for x in candidates]
+
+ def detect_lib_dirs(self, root: Path) -> T.List[Path]:
+        # First check the system library paths. Only consider those within
+        # the given root path.
+ system_dirs_t = self.clib_compiler.get_library_dirs(self.env)
+ system_dirs = [Path(x) for x in system_dirs_t]
+ system_dirs = [x.resolve() for x in system_dirs if x.exists()]
+ system_dirs = [x for x in system_dirs if mesonlib.path_is_in_root(x, root)]
+ system_dirs = list(mesonlib.OrderedSet(system_dirs))
+
+ if system_dirs:
+ return system_dirs
+
+        # No system library paths were found --> fall back to manually looking
+        # for library dirs in root
+ dirs = [] # type: T.List[Path]
+ subdirs = [] # type: T.List[Path]
+ for i in root.iterdir():
+ if i.is_dir() and i.name.startswith('lib'):
+ dirs += [i]
+
+ # Some distros put libraries not directly inside /usr/lib but in /usr/lib/x86_64-linux-gnu
+ for i in dirs:
+ for j in i.iterdir():
+ if j.is_dir() and j.name.endswith('-linux-gnu'):
+ subdirs += [j]
+
+ # Filter out paths that don't match the target arch to avoid finding
+ # the wrong libraries. See https://github.com/mesonbuild/meson/issues/7110
+ if not self.arch:
+ return dirs + subdirs
+
+ arch_list_32 = ['32', 'i386']
+ arch_list_64 = ['64']
+
+ raw_list = dirs + subdirs
+ no_arch = [x for x in raw_list if not any([y in x.name for y in arch_list_32 + arch_list_64])]
+
+ matching_arch = [] # type: T.List[Path]
+ if '32' in self.arch:
+ matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_32])]
+ elif '64' in self.arch:
+ matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_64])]
+
+ return sorted(matching_arch) + sorted(no_arch)
+
+ def filter_libraries(self, libs: T.List[BoostLibraryFile], lib_vers: str) -> T.List[BoostLibraryFile]:
+ # MSVC is very picky with the library tags
+ vscrt = ''
+ try:
+ crt_val = self.env.coredata.options[mesonlib.OptionKey('b_vscrt')].value
+ buildtype = self.env.coredata.options[mesonlib.OptionKey('buildtype')].value
+ vscrt = self.clib_compiler.get_crt_compile_args(crt_val, buildtype)[0]
+ except (KeyError, IndexError, AttributeError):
+ pass
+
+ # mlog.debug(' - static: {}'.format(self.static))
+ # mlog.debug(' - not explicit static: {}'.format(not self.explicit_static))
+ # mlog.debug(' - mt: {}'.format(self.multithreading))
+ # mlog.debug(' - version: {}'.format(lib_vers))
+ # mlog.debug(' - arch: {}'.format(self.arch))
+ # mlog.debug(' - vscrt: {}'.format(vscrt))
+ libs = [x for x in libs if x.static == self.static or not self.explicit_static]
+ libs = [x for x in libs if x.mt == self.multithreading]
+ libs = [x for x in libs if x.version_matches(lib_vers)]
+ libs = [x for x in libs if x.arch_matches(self.arch)]
+ libs = [x for x in libs if x.vscrt_matches(vscrt)]
+ libs = [x for x in libs if x.nvsuffix != 'dll'] # Only link to import libraries
+
+ # Only filter by debug when we are building in release mode. Debug
+ # libraries are automatically preferred through sorting otherwise.
+ if not self.debug:
+ libs = [x for x in libs if not x.debug]
+
+ # Take the abitag from the first library and filter by it. This
+ # ensures that we have a set of libraries that are always compatible.
+ if not libs:
+ return []
+ abitag = libs[0].abitag
+ libs = [x for x in libs if x.abitag == abitag]
+
+ return libs
+
+ def detect_libraries(self, libdir: Path) -> T.List[BoostLibraryFile]:
+ libs = [] # type: T.List[BoostLibraryFile]
+ for i in libdir.iterdir():
+ if not i.is_file() or i.is_symlink():
+ continue
+ if not any([i.name.startswith(x) for x in ['libboost_', 'boost_']]):
+ continue
+
+ libs += [BoostLibraryFile(i)]
+        return [x for x in libs if x.is_boost()] # Filter out non-boost libraries
+
+ def detect_split_root(self, inc_dir: Path, lib_dir: Path) -> None:
+ boost_inc_dir = None
+ for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']:
+ if j.is_file():
+ boost_inc_dir = self._include_dir_from_version_header(j)
+ break
+ if not boost_inc_dir:
+ self.is_found = False
+ return
+
+ self.is_found = self.run_check([boost_inc_dir], [lib_dir])
+
+ def detect_roots(self) -> None:
+ roots = [] # type: T.List[Path]
+
+ # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarily
+ # allows BoostDependency to find boost from Conan. See #5438
+ try:
+ boost_pc = PkgConfigDependency('boost', self.env, {'required': False})
+ if boost_pc.found():
+ boost_root = boost_pc.get_pkgconfig_variable('prefix', {'default': None})
+ if boost_root:
+ roots += [Path(boost_root)]
+ except DependencyException:
+ pass
+
+ # Add roots from system paths
+ inc_paths = [Path(x) for x in self.clib_compiler.get_default_include_dirs()]
+ inc_paths = [x.parent for x in inc_paths if x.exists()]
+ inc_paths = [x.resolve() for x in inc_paths]
+ roots += inc_paths
+
+ # Add system paths
+ if self.env.machines[self.for_machine].is_windows():
+ # Where boost built from source actually installs it
+ c_root = Path('C:/Boost')
+ if c_root.is_dir():
+ roots += [c_root]
+
+ # Where boost documentation says it should be
+ prog_files = Path('C:/Program Files/boost')
+ # Where boost prebuilt binaries are
+ local_boost = Path('C:/local')
+
+ candidates = [] # type: T.List[Path]
+ if prog_files.is_dir():
+ candidates += [*prog_files.iterdir()]
+ if local_boost.is_dir():
+ candidates += [*local_boost.iterdir()]
+
+ roots += [x for x in candidates if x.name.lower().startswith('boost') and x.is_dir()]
+ else:
+ tmp = [] # type: T.List[Path]
+
+ # Homebrew
+ brew_boost = Path('/usr/local/Cellar/boost')
+ if brew_boost.is_dir():
+ tmp += [x for x in brew_boost.iterdir()]
+
+ # Add some default system paths
+ tmp += [Path('/opt/local')]
+ tmp += [Path('/usr/local/opt/boost')]
+ tmp += [Path('/usr/local')]
+ tmp += [Path('/usr')]
+
+ # Cleanup paths
+ tmp = [x for x in tmp if x.is_dir()]
+ tmp = [x.resolve() for x in tmp]
+ roots += tmp
+
+ self.check_and_set_roots(roots)
+
+ def log_details(self) -> str:
+ res = ''
+ if self.modules_found:
+ res += 'found: ' + ', '.join(self.modules_found)
+ if self.modules_missing:
+ if res:
+ res += ' | '
+ res += 'missing: ' + ', '.join(self.modules_missing)
+ return res
+
+ def log_info(self) -> str:
+ if self.boost_root:
+ return self.boost_root.as_posix()
+ return ''
+
+ def _include_dir_from_version_header(self, hfile: Path) -> BoostIncludeDir:
+        # Extract the version with a regex. Using clib_compiler.get_define would
+        # also work, but it is slower (since the compiler has to be invoked) and
+        # overkill, since the layout of the header is always the same.
+ assert hfile.exists()
+ raw = hfile.read_text(encoding='utf-8')
+ m = re.search(r'#define\s+BOOST_VERSION\s+([0-9]+)', raw)
+ if not m:
+ mlog.debug(f'Failed to extract version information from {hfile}')
+ return BoostIncludeDir(hfile.parents[1], 0)
+ return BoostIncludeDir(hfile.parents[1], int(m.group(1)))
+
+ def _extra_compile_args(self) -> T.List[str]:
+ # BOOST_ALL_DYN_LINK should not be required with the known defines below
+ return ['-DBOOST_ALL_NO_LIB'] # Disable automatic linking
+
+
+# See https://www.boost.org/doc/libs/1_72_0/more/getting_started/unix-variants.html#library-naming
+# See https://mesonbuild.com/Reference-tables.html#cpu-families
+boost_arch_map = {
+ 'aarch64': 'a64',
+ 'arc': 'a32',
+ 'arm': 'a32',
+ 'ia64': 'i64',
+ 'mips': 'm32',
+ 'mips64': 'm64',
+ 'ppc': 'p32',
+ 'ppc64': 'p64',
+ 'sparc': 's32',
+ 'sparc64': 's64',
+ 'x86': 'x32',
+ 'x86_64': 'x64',
+}
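+
+# Illustrative example: on an x86_64 host, cpu_family 'x86_64' maps to the
+# tag 'x64', so a library such as boost_system-vc142-mt-x64-1_73.lib passes
+# arch_matches('x64') while an '-x32' variant would be filtered out.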
+
+
+#### ---- BEGIN GENERATED ---- ####
+# #
+# Generated with tools/boost_names.py:
+# - boost version: 1.73.0
+# - modules found: 159
+# - libraries found: 43
+#
+
+class BoostLibrary():
+ def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
+ self.name = name
+ self.shared = shared
+ self.static = static
+ self.single = single
+ self.multi = multi
+
+class BoostModule():
+ def __init__(self, name: str, key: str, desc: str, libs: T.List[str]):
+ self.name = name
+ self.key = key
+ self.desc = desc
+ self.libs = libs
+
+
+# dict of all known libraries with additional compile options
+boost_libraries = {
+ 'boost_atomic': BoostLibrary(
+ name='boost_atomic',
+ shared=['-DBOOST_ATOMIC_DYN_LINK=1'],
+ static=['-DBOOST_ATOMIC_STATIC_LINK=1'],
+ single=[],
+ multi=[],
+ ),
+ 'boost_chrono': BoostLibrary(
+ name='boost_chrono',
+ shared=['-DBOOST_CHRONO_DYN_LINK=1'],
+ static=['-DBOOST_CHRONO_STATIC_LINK=1'],
+ single=['-DBOOST_CHRONO_THREAD_DISABLED'],
+ multi=[],
+ ),
+ 'boost_container': BoostLibrary(
+ name='boost_container',
+ shared=['-DBOOST_CONTAINER_DYN_LINK=1'],
+ static=['-DBOOST_CONTAINER_STATIC_LINK=1'],
+ single=[],
+ multi=[],
+ ),
+ 'boost_context': BoostLibrary(
+ name='boost_context',
+ shared=['-DBOOST_CONTEXT_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_contract': BoostLibrary(
+ name='boost_contract',
+ shared=['-DBOOST_CONTRACT_DYN_LINK'],
+ static=['-DBOOST_CONTRACT_STATIC_LINK'],
+ single=['-DBOOST_CONTRACT_DISABLE_THREADS'],
+ multi=[],
+ ),
+ 'boost_coroutine': BoostLibrary(
+ name='boost_coroutine',
+ shared=['-DBOOST_COROUTINES_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_date_time': BoostLibrary(
+ name='boost_date_time',
+ shared=['-DBOOST_DATE_TIME_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_exception': BoostLibrary(
+ name='boost_exception',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_fiber': BoostLibrary(
+ name='boost_fiber',
+ shared=['-DBOOST_FIBERS_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_fiber_numa': BoostLibrary(
+ name='boost_fiber_numa',
+ shared=['-DBOOST_FIBERS_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_filesystem': BoostLibrary(
+ name='boost_filesystem',
+ shared=['-DBOOST_FILESYSTEM_DYN_LINK=1'],
+ static=['-DBOOST_FILESYSTEM_STATIC_LINK=1'],
+ single=[],
+ multi=[],
+ ),
+ 'boost_graph': BoostLibrary(
+ name='boost_graph',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_iostreams': BoostLibrary(
+ name='boost_iostreams',
+ shared=['-DBOOST_IOSTREAMS_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_locale': BoostLibrary(
+ name='boost_locale',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_log': BoostLibrary(
+ name='boost_log',
+ shared=['-DBOOST_LOG_DYN_LINK=1'],
+ static=[],
+ single=['-DBOOST_LOG_NO_THREADS'],
+ multi=[],
+ ),
+ 'boost_log_setup': BoostLibrary(
+ name='boost_log_setup',
+ shared=['-DBOOST_LOG_SETUP_DYN_LINK=1'],
+ static=[],
+ single=['-DBOOST_LOG_NO_THREADS'],
+ multi=[],
+ ),
+ 'boost_math_c99': BoostLibrary(
+ name='boost_math_c99',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_math_c99f': BoostLibrary(
+ name='boost_math_c99f',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_math_c99l': BoostLibrary(
+ name='boost_math_c99l',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_math_tr1': BoostLibrary(
+ name='boost_math_tr1',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_math_tr1f': BoostLibrary(
+ name='boost_math_tr1f',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_math_tr1l': BoostLibrary(
+ name='boost_math_tr1l',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_mpi': BoostLibrary(
+ name='boost_mpi',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_nowide': BoostLibrary(
+ name='boost_nowide',
+ shared=['-DBOOST_NOWIDE_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_prg_exec_monitor': BoostLibrary(
+ name='boost_prg_exec_monitor',
+ shared=['-DBOOST_TEST_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_program_options': BoostLibrary(
+ name='boost_program_options',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_random': BoostLibrary(
+ name='boost_random',
+ shared=['-DBOOST_RANDOM_DYN_LINK'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_regex': BoostLibrary(
+ name='boost_regex',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_serialization': BoostLibrary(
+ name='boost_serialization',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_stacktrace_addr2line': BoostLibrary(
+ name='boost_stacktrace_addr2line',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_stacktrace_backtrace': BoostLibrary(
+ name='boost_stacktrace_backtrace',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_stacktrace_basic': BoostLibrary(
+ name='boost_stacktrace_basic',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_stacktrace_noop': BoostLibrary(
+ name='boost_stacktrace_noop',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_stacktrace_windbg': BoostLibrary(
+ name='boost_stacktrace_windbg',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_stacktrace_windbg_cached': BoostLibrary(
+ name='boost_stacktrace_windbg_cached',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_system': BoostLibrary(
+ name='boost_system',
+ shared=['-DBOOST_SYSTEM_DYN_LINK=1'],
+ static=['-DBOOST_SYSTEM_STATIC_LINK=1'],
+ single=[],
+ multi=[],
+ ),
+ 'boost_test_exec_monitor': BoostLibrary(
+ name='boost_test_exec_monitor',
+ shared=['-DBOOST_TEST_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_thread': BoostLibrary(
+ name='boost_thread',
+ shared=['-DBOOST_THREAD_BUILD_DLL=1', '-DBOOST_THREAD_USE_DLL=1'],
+ static=['-DBOOST_THREAD_BUILD_LIB=1', '-DBOOST_THREAD_USE_LIB=1'],
+ single=[],
+ multi=[],
+ ),
+ 'boost_timer': BoostLibrary(
+ name='boost_timer',
+ shared=['-DBOOST_TIMER_DYN_LINK=1'],
+ static=['-DBOOST_TIMER_STATIC_LINK=1'],
+ single=[],
+ multi=[],
+ ),
+ 'boost_type_erasure': BoostLibrary(
+ name='boost_type_erasure',
+ shared=['-DBOOST_TYPE_ERASURE_DYN_LINK'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_unit_test_framework': BoostLibrary(
+ name='boost_unit_test_framework',
+ shared=['-DBOOST_TEST_DYN_LINK=1'],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_wave': BoostLibrary(
+ name='boost_wave',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_wserialization': BoostLibrary(
+ name='boost_wserialization',
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+}
+
+# #
+#### ---- END GENERATED ---- ####
diff --git a/meson/mesonbuild/dependencies/cmake.py b/meson/mesonbuild/dependencies/cmake.py
new file mode 100644
index 000000000..047950da5
--- /dev/null
+++ b/meson/mesonbuild/dependencies/cmake.py
@@ -0,0 +1,718 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import ExternalDependency, DependencyException, DependencyMethods, DependencyTypeName
+from ..mesonlib import is_windows, MesonException, OptionKey, PerMachine, stringlistify, extract_as_list
+from ..mesondata import mesondata
+from ..cmake import CMakeExecutor, CMakeTraceParser, CMakeException, CMakeToolchain, CMakeExecScope, check_cmake_args, CMakeTarget
+from .. import mlog
+from pathlib import Path
+import functools
+import re
+import os
+import shutil
+import textwrap
+import typing as T
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+ from ..envconfig import MachineInfo
+
+class CMakeInfo(T.NamedTuple):
+ module_paths: T.List[str]
+ cmake_root: str
+ archs: T.List[str]
+ common_paths: T.List[str]
+
+class CMakeDependency(ExternalDependency):
+ # The class's copy of the CMake path. Avoids having to search for it
+ # multiple times in the same Meson invocation.
+ class_cmakeinfo: PerMachine[T.Optional[CMakeInfo]] = PerMachine(None, None)
+ # Version string for the minimum CMake version
+ class_cmake_version = '>=3.4'
+ # CMake generators to try (empty for no generator)
+ class_cmake_generators = ['', 'Ninja', 'Unix Makefiles', 'Visual Studio 10 2010']
+ class_working_generator: T.Optional[str] = None
+
+ def _gen_exception(self, msg: str) -> DependencyException:
+ return DependencyException(f'Dependency {self.name} not found: {msg}')
+
+ def _main_cmake_file(self) -> str:
+ return 'CMakeLists.txt'
+
+ def _extra_cmake_opts(self) -> T.List[str]:
+ return []
+
+ def _map_module_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
+ # Map the input module list to something else
+ # This function will only be executed AFTER the initial CMake
+ # interpreter pass has completed. Thus variables defined in the
+ # CMakeLists.txt can be accessed here.
+ #
+ # Both the modules and components inputs contain the original lists.
+ return modules
+
+ def _map_component_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
+ # Map the input components list to something else. This
+ # function will be executed BEFORE the initial CMake interpreter
+ # pass. Thus variables from the CMakeLists.txt can NOT be accessed.
+ #
+ # Both the modules and components inputs contain the original lists.
+ return components
+
+ def _original_module_name(self, module: str) -> str:
+ # Reverse the module mapping done by _map_module_list for
+ # one module
+ return module
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+ # Gather a list of all languages to support
+ self.language_list = [] # type: T.List[str]
+ if language is None:
+ compilers = None
+ if kwargs.get('native', False):
+ compilers = environment.coredata.compilers.build
+ else:
+ compilers = environment.coredata.compilers.host
+
+ candidates = ['c', 'cpp', 'fortran', 'objc', 'objcxx']
+ self.language_list += [x for x in candidates if x in compilers]
+ else:
+ self.language_list += [language]
+
+ # Add additional languages if required
+ if 'fortran' in self.language_list:
+ self.language_list += ['c']
+
+ # Ensure that the list is unique
+ self.language_list = list(set(self.language_list))
+
+ super().__init__(DependencyTypeName('cmake'), environment, kwargs, language=language)
+ self.name = name
+ self.is_libtool = False
+ # Store a copy of the CMake path on the object itself so it is
+ # stored in the pickled coredata and recovered.
+ self.cmakebin: T.Optional[CMakeExecutor] = None
+ self.cmakeinfo: T.Optional[CMakeInfo] = None
+
+ # Where all CMake "build dirs" are located
+ self.cmake_root_dir = environment.scratch_dir
+
+ # T.List of successfully found modules
+ self.found_modules: T.List[str] = []
+
+ # Initialize with None before the first return to avoid
+ # AttributeError exceptions in derived classes
+ self.traceparser: T.Optional[CMakeTraceParser] = None
+
+ # TODO further evaluate always using MachineChoice.BUILD
+ self.cmakebin = CMakeExecutor(environment, CMakeDependency.class_cmake_version, self.for_machine, silent=self.silent)
+ if not self.cmakebin.found():
+ self.cmakebin = None
+ msg = f'CMake binary for machine {self.for_machine} not found. Giving up.'
+ if self.required:
+ raise DependencyException(msg)
+ mlog.debug(msg)
+ return
+
+ # Setup the trace parser
+ self.traceparser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir())
+
+ cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args'))
+ cm_args = check_cmake_args(cm_args)
+ if CMakeDependency.class_cmakeinfo[self.for_machine] is None:
+ CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info(cm_args)
+ self.cmakeinfo = CMakeDependency.class_cmakeinfo[self.for_machine]
+ if self.cmakeinfo is None:
+ raise self._gen_exception('Unable to obtain CMake system information')
+
+ package_version = kwargs.get('cmake_package_version', '')
+ if not isinstance(package_version, str):
+ raise DependencyException('Keyword "cmake_package_version" must be a string.')
+ components = [(x, True) for x in stringlistify(extract_as_list(kwargs, 'components'))]
+ modules = [(x, True) for x in stringlistify(extract_as_list(kwargs, 'modules'))]
+ modules += [(x, False) for x in stringlistify(extract_as_list(kwargs, 'optional_modules'))]
+ cm_path = stringlistify(extract_as_list(kwargs, 'cmake_module_path'))
+ cm_path = [x if os.path.isabs(x) else os.path.join(environment.get_source_dir(), x) for x in cm_path]
+ if cm_path:
+ cm_args.append('-DCMAKE_MODULE_PATH=' + ';'.join(cm_path))
+ if not self._preliminary_find_check(name, cm_path, self.cmakebin.get_cmake_prefix_paths(), environment.machines[self.for_machine]):
+ mlog.debug('Preliminary CMake check failed. Aborting.')
+ return
+ self._detect_dep(name, package_version, modules, components, cm_args)
+
+ def __repr__(self) -> str:
+ return f'<{self.__class__.__name__} {self.name}: {self.is_found} {self.version_reqs}>'
+
+ def _get_cmake_info(self, cm_args: T.List[str]) -> T.Optional[CMakeInfo]:
+ mlog.debug("Extracting basic cmake information")
+
+ # Try different CMake generators since specifying no generator may fail
+ # in cygwin for some reason
+ gen_list = []
+ # First try the last working generator
+ if CMakeDependency.class_working_generator is not None:
+ gen_list += [CMakeDependency.class_working_generator]
+ gen_list += CMakeDependency.class_cmake_generators
+
+ temp_parser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir())
+ toolchain = CMakeToolchain(self.cmakebin, self.env, self.for_machine, CMakeExecScope.DEPENDENCY, self._get_build_dir())
+ toolchain.write()
+
+ for i in gen_list:
+ mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))
+
+ # Prepare options
+ cmake_opts = temp_parser.trace_args() + toolchain.get_cmake_args() + ['.']
+ cmake_opts += cm_args
+ if len(i) > 0:
+ cmake_opts = ['-G', i] + cmake_opts
+
+ # Run CMake
+ ret1, out1, err1 = self._call_cmake(cmake_opts, 'CMakePathInfo.txt')
+
+ # Current generator was successful
+ if ret1 == 0:
+ CMakeDependency.class_working_generator = i
+ break
+
+ mlog.debug(f'CMake failed to gather system information for generator {i} with error code {ret1}')
+ mlog.debug(f'OUT:\n{out1}\n\n\nERR:\n{err1}\n\n')
+
+ # Check if any generator succeeded
+ if ret1 != 0:
+ return None
+
+ try:
+ temp_parser.parse(err1)
+ except MesonException:
+ return None
+
+ def process_paths(l: T.List[str]) -> T.Set[str]:
+ if is_windows():
+                # Cannot split on ':' on Windows because it's in the drive letter
+ tmp = [x.split(os.pathsep) for x in l]
+ else:
+ # https://github.com/mesonbuild/meson/issues/7294
+ tmp = [re.split(r':|;', x) for x in l]
+ flattened = [x for sublist in tmp for x in sublist]
+ return set(flattened)
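+
+        # Illustrative example (hypothetical paths): on Unix,
+        # process_paths(['/usr;/usr/local:/opt']) yields
+        # {'/usr', '/usr/local', '/opt'}; on Windows only os.pathsep is split,
+        # so drive letters such as in 'C:/foo' stay intact.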
+
+ # Extract the variables and sanity check them
+ root_paths_set = process_paths(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH'))
+ root_paths_set.update(process_paths(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT')))
+ root_paths = sorted(root_paths_set)
+ root_paths = [x for x in root_paths if os.path.isdir(x)]
+ module_paths_set = process_paths(temp_parser.get_cmake_var('MESON_PATHS_LIST'))
+ rooted_paths: T.List[str] = []
+ for j in [Path(x) for x in root_paths]:
+ for p in [Path(x) for x in module_paths_set]:
+ rooted_paths.append(str(j / p.relative_to(p.anchor)))
+ module_paths = sorted(module_paths_set.union(rooted_paths))
+ module_paths = [x for x in module_paths if os.path.isdir(x)]
+ archs = temp_parser.get_cmake_var('MESON_ARCH_LIST')
+
+ common_paths = ['lib', 'lib32', 'lib64', 'libx32', 'share']
+ for i in archs:
+ common_paths += [os.path.join('lib', i)]
+
+ res = CMakeInfo(
+ module_paths=module_paths,
+ cmake_root=temp_parser.get_cmake_var('MESON_CMAKE_ROOT')[0],
+ archs=archs,
+ common_paths=common_paths,
+ )
+
+ mlog.debug(f' -- Module search paths: {res.module_paths}')
+ mlog.debug(f' -- CMake root: {res.cmake_root}')
+ mlog.debug(f' -- CMake architectures: {res.archs}')
+ mlog.debug(f' -- CMake lib search paths: {res.common_paths}')
+
+ return res
+
+ @staticmethod
+ @functools.lru_cache(maxsize=None)
+ def _cached_listdir(path: str) -> T.Tuple[T.Tuple[str, str], ...]:
+ try:
+ return tuple((x, str(x).lower()) for x in os.listdir(path))
+ except OSError:
+ return tuple()
+
+ @staticmethod
+ @functools.lru_cache(maxsize=None)
+ def _cached_isdir(path: str) -> bool:
+ try:
+ return os.path.isdir(path)
+ except OSError:
+ return False
+
+ def _preliminary_find_check(self, name: str, module_path: T.List[str], prefix_path: T.List[str], machine: 'MachineInfo') -> bool:
+ lname = str(name).lower()
+
+ # Checks <path>, <path>/cmake, <path>/CMake
+ def find_module(path: str) -> bool:
+ for i in [path, os.path.join(path, 'cmake'), os.path.join(path, 'CMake')]:
+ if not self._cached_isdir(i):
+ continue
+
+            # Check the directory case-insensitively
+ content = self._cached_listdir(i)
+ candidates = ['Find{}.cmake', '{}Config.cmake', '{}-config.cmake']
+ candidates = [x.format(name).lower() for x in candidates]
+ if any([x[1] in candidates for x in content]):
+ return True
+ return False
+
+ # Search in <path>/(lib/<arch>|lib*|share) for cmake files
+ def search_lib_dirs(path: str) -> bool:
+ for i in [os.path.join(path, x) for x in self.cmakeinfo.common_paths]:
+ if not self._cached_isdir(i):
+ continue
+
+ # Check <path>/(lib/<arch>|lib*|share)/cmake/<name>*/
+ cm_dir = os.path.join(i, 'cmake')
+ if self._cached_isdir(cm_dir):
+ content = self._cached_listdir(cm_dir)
+ content = tuple(x for x in content if x[1].startswith(lname))
+ for k in content:
+ if find_module(os.path.join(cm_dir, k[0])):
+ return True
+
+ # <path>/(lib/<arch>|lib*|share)/<name>*/
+ # <path>/(lib/<arch>|lib*|share)/<name>*/(cmake|CMake)/
+ content = self._cached_listdir(i)
+ content = tuple(x for x in content if x[1].startswith(lname))
+ for k in content:
+ if find_module(os.path.join(i, k[0])):
+ return True
+
+ return False
+
+ # Check the user provided and system module paths
+ for i in module_path + [os.path.join(self.cmakeinfo.cmake_root, 'Modules')]:
+ if find_module(i):
+ return True
+
+ # Check the user provided prefix paths
+ for i in prefix_path:
+ if search_lib_dirs(i):
+ return True
+
+ # Check PATH
+ system_env = [] # type: T.List[str]
+ for i in os.environ.get('PATH', '').split(os.pathsep):
+ if i.endswith('/bin') or i.endswith('\\bin'):
+ i = i[:-4]
+ if i.endswith('/sbin') or i.endswith('\\sbin'):
+ i = i[:-5]
+ system_env += [i]
+
+ # Check the system paths
+ for i in self.cmakeinfo.module_paths + system_env:
+ if find_module(i):
+ return True
+
+ if search_lib_dirs(i):
+ return True
+
+ content = self._cached_listdir(i)
+ content = tuple(x for x in content if x[1].startswith(lname))
+ for k in content:
+ if search_lib_dirs(os.path.join(i, k[0])):
+ return True
+
+ # Mac framework support
+ if machine.is_darwin():
+ for j in [f'{lname}.framework', f'{lname}.app']:
+ for k in content:
+ if k[1] != j:
+ continue
+ if find_module(os.path.join(i, k[0], 'Resources')) or find_module(os.path.join(i, k[0], 'Version')):
+ return True
+
+ # Check the environment path
+ env_path = os.environ.get(f'{name}_DIR')
+ if env_path and find_module(env_path):
+ return True
+
+ return False
+
+ def _detect_dep(self, name: str, package_version: str, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]], args: T.List[str]) -> None:
+ # Detect a dependency with CMake using the '--find-package' mode
+ # and the trace output (stderr)
+ #
+ # When the trace output is enabled CMake prints all functions with
+ # parameters to stderr as they are executed. Since CMake 3.4.0
+ # variables ("${VAR}") are also replaced in the trace output.
+ mlog.debug('\nDetermining dependency {!r} with CMake executable '
+ '{!r}'.format(name, self.cmakebin.executable_path()))
+
+ # Try different CMake generators since specifying no generator may fail
+ # in cygwin for some reason
+ gen_list = []
+ # First try the last working generator
+ if CMakeDependency.class_working_generator is not None:
+ gen_list += [CMakeDependency.class_working_generator]
+ gen_list += CMakeDependency.class_cmake_generators
+
+ # Map the components
+ comp_mapped = self._map_component_list(modules, components)
+ toolchain = CMakeToolchain(self.cmakebin, self.env, self.for_machine, CMakeExecScope.DEPENDENCY, self._get_build_dir())
+ toolchain.write()
+
+ for i in gen_list:
+ mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))
+
+ # Prepare options
+ cmake_opts = []
+ cmake_opts += [f'-DNAME={name}']
+ cmake_opts += ['-DARCHS={}'.format(';'.join(self.cmakeinfo.archs))]
+ cmake_opts += [f'-DVERSION={package_version}']
+ cmake_opts += ['-DCOMPS={}'.format(';'.join([x[0] for x in comp_mapped]))]
+ cmake_opts += args
+ cmake_opts += self.traceparser.trace_args()
+ cmake_opts += toolchain.get_cmake_args()
+ cmake_opts += self._extra_cmake_opts()
+ cmake_opts += ['.']
+ if len(i) > 0:
+ cmake_opts = ['-G', i] + cmake_opts
+
+ # Run CMake
+ ret1, out1, err1 = self._call_cmake(cmake_opts, self._main_cmake_file())
+
+ # Current generator was successful
+ if ret1 == 0:
+ CMakeDependency.class_working_generator = i
+ break
+
+ mlog.debug(f'CMake failed for generator {i} and package {name} with error code {ret1}')
+ mlog.debug(f'OUT:\n{out1}\n\n\nERR:\n{err1}\n\n')
+
+ # Check if any generator succeeded
+ if ret1 != 0:
+ return
+
+ try:
+ self.traceparser.parse(err1)
+ except CMakeException as e:
+ e2 = self._gen_exception(str(e))
+ if self.required:
+ raise
+ else:
+ self.compile_args = []
+ self.link_args = []
+ self.is_found = False
+ self.reason = e2
+ return
+
+ # Whether the package is found or not is always stored in PACKAGE_FOUND
+ self.is_found = self.traceparser.var_to_bool('PACKAGE_FOUND')
+ if not self.is_found:
+ return
+
+ # Try to detect the version
+ vers_raw = self.traceparser.get_cmake_var('PACKAGE_VERSION')
+
+ if len(vers_raw) > 0:
+ self.version = vers_raw[0]
+            self.version = self.version.strip('"\' ')
+
+ # Post-process module list. Used in derived classes to modify the
+        # module list (append/prepend a string, etc.).
+ modules = self._map_module_list(modules, components)
+ autodetected_module_list = False
+
+        # Check if we need the DEBUG or RELEASE CMake dependency
+ is_debug = False
+ if OptionKey('b_vscrt') in self.env.coredata.options:
+ is_debug = self.env.coredata.get_option(OptionKey('buildtype')) == 'debug'
+ if self.env.coredata.options[OptionKey('b_vscrt')].value in {'mdd', 'mtd'}:
+ is_debug = True
+ else:
+ # Don't directly assign to is_debug to make mypy happy
+ debug_opt = self.env.coredata.get_option(OptionKey('debug'))
+ assert isinstance(debug_opt, bool)
+ is_debug = debug_opt
+
+ # Try guessing a CMake target if none is provided
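+        # (illustrative: for a dependency named 'zlib' the heuristic below
+        # would accept an imported target named 'ZLIB::ZLIB'; actual target
+        # names depend on the package)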
+ if len(modules) == 0:
+ for i in self.traceparser.targets:
+ tg = i.lower()
+ lname = name.lower()
+ if f'{lname}::{lname}' == tg or lname == tg.replace('::', ''):
+ mlog.debug(f'Guessed CMake target \'{i}\'')
+ modules = [(i, True)]
+ autodetected_module_list = True
+ break
+
+ # Failed to guess a target --> try the old-style method
+ if len(modules) == 0:
+ # Warn when there might be matching imported targets but no automatic match was used
+ partial_modules: T.List[CMakeTarget] = []
+ for k, v in self.traceparser.targets.items():
+ tg = k.lower()
+ lname = name.lower()
+ if tg.startswith(f'{lname}::'):
+ partial_modules += [v]
+ if partial_modules:
+ mlog.warning(textwrap.dedent(f'''\
+                    Could not find an exact match for the CMake dependency {name}.
+
+ However, Meson found the following partial matches:
+
+ {[x.name for x in partial_modules]}
+
+                    Using imported targets is recommended, since this approach is less
+                    error-prone and better supported by Meson. Consider explicitly
+                    specifying one of these in the dependency call with:
+
+ dependency('{name}', modules: ['{name}::<name>', ...])
+
+ Meson will now continue to use the old-style {name}_LIBRARIES CMake
+ variables to extract the dependency information since no explicit
+ target is currently specified.
+
+ '''))
+ mlog.debug('More info for the partial match targets:')
+ for tgt in partial_modules:
+ mlog.debug(tgt)
+
+ incDirs = [x for x in self.traceparser.get_cmake_var('PACKAGE_INCLUDE_DIRS') if x]
+ defs = [x for x in self.traceparser.get_cmake_var('PACKAGE_DEFINITIONS') if x]
+ libs_raw = [x for x in self.traceparser.get_cmake_var('PACKAGE_LIBRARIES') if x]
+
+ # CMake has a "fun" API, where certain keywords describing
+        # configurations can be in the *_LIBRARIES variables. See:
+ # - https://github.com/mesonbuild/meson/issues/9197
+ # - https://gitlab.freedesktop.org/libnice/libnice/-/issues/140
+ # - https://cmake.org/cmake/help/latest/command/target_link_libraries.html#overview (the last point in the section)
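+        #
+        # A hypothetical raw list illustrating the filtering below: in a
+        # debug build
+        #   ['optimized', 'foo.lib', 'debug', 'foo_d.lib', 'bar.lib']
+        # reduces to ['foo_d.lib', 'bar.lib']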
+ libs: T.List[str] = []
+ cfg_matches = True
+ cm_tag_map = {'debug': is_debug, 'optimized': not is_debug, 'general': True}
+ for i in libs_raw:
+ if i.lower() in cm_tag_map:
+ cfg_matches = cm_tag_map[i.lower()]
+ continue
+ if cfg_matches:
+ libs += [i]
+            # According to the CMake docs, a keyword only applies to the item
+            # immediately following it, and all items without a keyword are
+            # implicitly `general`
+ cfg_matches = True
+
+ # Try to use old style variables if no module is specified
+ if len(libs) > 0:
+ self.compile_args = list(map(lambda x: f'-I{x}', incDirs)) + defs
+ self.link_args = libs
+ mlog.debug(f'using old-style CMake variables for dependency {name}')
+ mlog.debug(f'Include Dirs: {incDirs}')
+ mlog.debug(f'Compiler Definitions: {defs}')
+ mlog.debug(f'Libraries: {libs}')
+ return
+
+ # Even the old-style approach failed. Nothing else we can do here
+ self.is_found = False
+ raise self._gen_exception('CMake: failed to guess a CMake target for {}.\n'
+ 'Try to explicitly specify one or more targets with the "modules" property.\n'
+ 'Valid targets are:\n{}'.format(name, list(self.traceparser.targets.keys())))
+
+ # Set dependencies with CMake targets
+ # recognise arguments we should pass directly to the linker
+ reg_is_lib = re.compile(r'^(-l[a-zA-Z0-9_]+|-pthread|-delayload:[a-zA-Z0-9_\.]+|[a-zA-Z0-9_]+\.lib)$')
+ reg_is_maybe_bare_lib = re.compile(r'^[a-zA-Z0-9_]+$')
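+        # (reg_is_lib matches e.g. '-lz', '-pthread' or 'foo.lib';
+        # reg_is_maybe_bare_lib matches bare names such as 'version')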
+ processed_targets = []
+ incDirs = []
+ compileDefinitions = []
+ compileOptions = []
+ libraries = []
+ for i, required in modules:
+ if i not in self.traceparser.targets:
+ if not required:
+                    mlog.warning('CMake: Optional module', mlog.bold(self._original_module_name(i)), 'for', mlog.bold(name), 'was not found')
+ continue
+ raise self._gen_exception('CMake: invalid module {} for {}.\n'
+ 'Try to explicitly specify one or more targets with the "modules" property.\n'
+ 'Valid targets are:\n{}'.format(self._original_module_name(i), name, list(self.traceparser.targets.keys())))
+
+ targets = [i]
+ if not autodetected_module_list:
+ self.found_modules += [i]
+
+ while len(targets) > 0:
+ curr = targets.pop(0)
+
+ # Skip already processed targets
+ if curr in processed_targets:
+ continue
+
+ tgt = self.traceparser.targets[curr]
+ cfgs = []
+ cfg = ''
+ otherDeps = []
+ mlog.debug(tgt)
+
+ if 'INTERFACE_INCLUDE_DIRECTORIES' in tgt.properties:
+ incDirs += [x for x in tgt.properties['INTERFACE_INCLUDE_DIRECTORIES'] if x]
+
+ if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properties:
+ compileDefinitions += ['-D' + re.sub('^-D', '', x) for x in tgt.properties['INTERFACE_COMPILE_DEFINITIONS'] if x]
+
+ if 'INTERFACE_COMPILE_OPTIONS' in tgt.properties:
+ compileOptions += [x for x in tgt.properties['INTERFACE_COMPILE_OPTIONS'] if x]
+
+ if 'IMPORTED_CONFIGURATIONS' in tgt.properties:
+ cfgs = [x for x in tgt.properties['IMPORTED_CONFIGURATIONS'] if x]
+ cfg = cfgs[0]
+
+ if is_debug:
+ if 'DEBUG' in cfgs:
+ cfg = 'DEBUG'
+ elif 'RELEASE' in cfgs:
+ cfg = 'RELEASE'
+ else:
+ if 'RELEASE' in cfgs:
+ cfg = 'RELEASE'
+
+ if f'IMPORTED_IMPLIB_{cfg}' in tgt.properties:
+ libraries += [x for x in tgt.properties[f'IMPORTED_IMPLIB_{cfg}'] if x]
+ elif 'IMPORTED_IMPLIB' in tgt.properties:
+ libraries += [x for x in tgt.properties['IMPORTED_IMPLIB'] if x]
+ elif f'IMPORTED_LOCATION_{cfg}' in tgt.properties:
+ libraries += [x for x in tgt.properties[f'IMPORTED_LOCATION_{cfg}'] if x]
+ elif 'IMPORTED_LOCATION' in tgt.properties:
+ libraries += [x for x in tgt.properties['IMPORTED_LOCATION'] if x]
+
+ if 'INTERFACE_LINK_LIBRARIES' in tgt.properties:
+ otherDeps += [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x]
+
+ if f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}' in tgt.properties:
+ otherDeps += [x for x in tgt.properties[f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}'] if x]
+ elif 'IMPORTED_LINK_DEPENDENT_LIBRARIES' in tgt.properties:
+ otherDeps += [x for x in tgt.properties['IMPORTED_LINK_DEPENDENT_LIBRARIES'] if x]
+
+ for j in otherDeps:
+ if j in self.traceparser.targets:
+ targets += [j]
+ elif reg_is_lib.match(j):
+ libraries += [j]
+ elif os.path.isabs(j) and os.path.exists(j):
+ libraries += [j]
+ elif self.env.machines.build.is_windows() and reg_is_maybe_bare_lib.match(j):
+ # On Windows, CMake library dependencies can be passed as bare library names,
+ # e.g. 'version' should translate into 'version.lib'. CMake brute-forces a
+ # combination of prefix/suffix combinations to find the right library, however
+ # as we do not have a compiler environment available to us, we cannot do the
+ # same, but must assume any bare argument passed which is not also a CMake
+ # target must be a system library we should try to link against
+ libraries += [f"{j}.lib"]
+ else:
+ mlog.warning('CMake: Dependency', mlog.bold(j), 'for', mlog.bold(name), 'target', mlog.bold(self._original_module_name(curr)), 'was not found')
+
+ processed_targets += [curr]
+
+ # Make sure all elements in the lists are unique and sorted
+ incDirs = sorted(set(incDirs))
+ compileDefinitions = sorted(set(compileDefinitions))
+ compileOptions = sorted(set(compileOptions))
+ libraries = sorted(set(libraries))
+
+ mlog.debug(f'Include Dirs: {incDirs}')
+ mlog.debug(f'Compiler Definitions: {compileDefinitions}')
+ mlog.debug(f'Compiler Options: {compileOptions}')
+ mlog.debug(f'Libraries: {libraries}')
+
+ self.compile_args = compileOptions + compileDefinitions + [f'-I{x}' for x in incDirs]
+ self.link_args = libraries
+
+ def _get_build_dir(self) -> Path:
+ build_dir = Path(self.cmake_root_dir) / f'cmake_{self.name}'
+ build_dir.mkdir(parents=True, exist_ok=True)
+ return build_dir
+
+ def _setup_cmake_dir(self, cmake_file: str) -> Path:
+ # Setup the CMake build environment and return the "build" directory
+ build_dir = self._get_build_dir()
+
+ # Remove old CMake cache so we can try out multiple generators
+ cmake_cache = build_dir / 'CMakeCache.txt'
+ cmake_files = build_dir / 'CMakeFiles'
+ if cmake_cache.exists():
+ cmake_cache.unlink()
+ shutil.rmtree(cmake_files.as_posix(), ignore_errors=True)
+
+ # Insert language parameters into the CMakeLists.txt and write new CMakeLists.txt
+ cmake_txt = mesondata['dependencies/data/' + cmake_file].data
+
+        # Some Fortran CMake find_package() calls also require the C language
+        # to be enabled, even if nothing from C is used directly. An easy
+        # Fortran example that fails without C enabled is
+        #   find_package(Threads)
+        # To keep this generic for any other language with the same
+        # requirement, we collect all languages in a list and expand it in
+        # the CMake project(... LANGUAGES ...) statement.
+ from ..cmake import language_map
+ cmake_language = [language_map[x] for x in self.language_list if x in language_map]
+ if not cmake_language:
+ cmake_language += ['NONE']
+
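+        # For example, with language_list ['fortran', 'c'] the header
+        # generated below would read (assuming the usual language_map
+        # entries):
+        #   project(MesonTemp LANGUAGES Fortran C)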
+ cmake_txt = textwrap.dedent("""
+ cmake_minimum_required(VERSION ${{CMAKE_VERSION}})
+ project(MesonTemp LANGUAGES {})
+ """).format(' '.join(cmake_language)) + cmake_txt
+
+ cm_file = build_dir / 'CMakeLists.txt'
+ cm_file.write_text(cmake_txt, encoding='utf-8')
+ mlog.cmd_ci_include(cm_file.absolute().as_posix())
+
+ return build_dir
+
+ def _call_cmake(self,
+ args: T.List[str],
+ cmake_file: str,
+ env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, T.Optional[str], T.Optional[str]]:
+ build_dir = self._setup_cmake_dir(cmake_file)
+ return self.cmakebin.call(args, build_dir, env=env)
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.CMAKE]
+
+ def log_tried(self) -> str:
+ return self.type_name
+
+ def log_details(self) -> str:
+ modules = [self._original_module_name(x) for x in self.found_modules]
+ modules = sorted(set(modules))
+ if modules:
+ return 'modules: ' + ', '.join(modules)
+ return ''
+
+ def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+ configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+ default_value: T.Optional[str] = None,
+ pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
+ if cmake and self.traceparser is not None:
+ try:
+ v = self.traceparser.vars[cmake]
+ except KeyError:
+ pass
+ else:
+ if len(v) == 1:
+ return v[0]
+ elif v:
+ return v
+ if default_value is not None:
+ return default_value
+ raise DependencyException(f'Could not get cmake variable and no default provided for {self!r}')
diff --git a/meson/mesonbuild/dependencies/coarrays.py b/meson/mesonbuild/dependencies/coarrays.py
new file mode 100644
index 000000000..d9af191a7
--- /dev/null
+++ b/meson/mesonbuild/dependencies/coarrays.py
@@ -0,0 +1,90 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import typing as T
+
+from .base import DependencyMethods, detect_compiler, SystemDependency
+from .cmake import CMakeDependency
+from .pkgconfig import PkgConfigDependency
+from .factory import factory_methods
+
+if T.TYPE_CHECKING:
+    from .factory import DependencyGenerator
+ from ..environment import Environment, MachineChoice
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE, DependencyMethods.SYSTEM})
+def coarray_factory(env: 'Environment',
+ for_machine: 'MachineChoice',
+ kwargs: T.Dict[str, T.Any],
+ methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+ fcid = detect_compiler('coarray', env, for_machine, 'fortran').get_id()
+ candidates: T.List['DependencyGenerator'] = []
+
+ if fcid == 'gcc':
+        # OpenCoarrays is the most commonly used method for Fortran coarrays with GCC
+ if DependencyMethods.PKGCONFIG in methods:
+ for pkg in ['caf-openmpi', 'caf']:
+ candidates.append(functools.partial(
+ PkgConfigDependency, pkg, env, kwargs, language='fortran'))
+
+ if DependencyMethods.CMAKE in methods:
+ if 'modules' not in kwargs:
+ kwargs['modules'] = 'OpenCoarrays::caf_mpi'
+ candidates.append(functools.partial(
+ CMakeDependency, 'OpenCoarrays', env, kwargs, language='fortran'))
+
+ if DependencyMethods.SYSTEM in methods:
+ candidates.append(functools.partial(CoarrayDependency, env, kwargs))
+
+ return candidates
+
+
+class CoarrayDependency(SystemDependency):
+ """
+ Coarrays are a Fortran 2008 feature.
+
+    Coarrays are sometimes implemented via an external library (GCC+OpenCoarrays),
+    while other compilers have built-in support (Cray, IBM, Intel, NAG).
+ Coarrays may be thought of as a high-level language abstraction of
+ low-level MPI calls.
+ """
+ def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+ super().__init__('coarray', environment, kwargs, language='fortran')
+ kwargs['required'] = False
+ kwargs['silent'] = True
+
+ cid = self.get_compiler().get_id()
+ if cid == 'gcc':
+ # Fallback to single image
+ self.compile_args = ['-fcoarray=single']
+ self.version = 'single image (fallback)'
+ self.is_found = True
+ elif cid == 'intel':
+ # Coarrays are built into Intel compilers, no external library needed
+ self.is_found = True
+ self.link_args = ['-coarray=shared']
+ self.compile_args = self.link_args
+ elif cid == 'intel-cl':
+ # Coarrays are built into Intel compilers, no external library needed
+ self.is_found = True
+ self.compile_args = ['/Qcoarray:shared']
+ elif cid == 'nagfor':
+ # NAG doesn't require any special arguments for Coarray
+ self.is_found = True
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.AUTO, DependencyMethods.CMAKE, DependencyMethods.PKGCONFIG]
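+
+# A minimal meson.build usage sketch (assuming this detector is registered
+# under the dependency name 'coarray'):
+#   coarray = dependency('coarray')
+#   executable('prog', 'main.f90', dependencies : coarray)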
diff --git a/meson/mesonbuild/dependencies/configtool.py b/meson/mesonbuild/dependencies/configtool.py
new file mode 100644
index 000000000..623affb2c
--- /dev/null
+++ b/meson/mesonbuild/dependencies/configtool.py
@@ -0,0 +1,178 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import ExternalDependency, DependencyException, DependencyMethods, DependencyTypeName
+from ..mesonlib import listify, Popen_safe, split_args, version_compare, version_compare_many
+from ..programs import find_external_program
+from .. import mlog
+import re
+import typing as T
+
+from mesonbuild import mesonlib
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+
+class ConfigToolDependency(ExternalDependency):
+
+ """Class representing dependencies found using a config tool.
+
+ Takes the following extra keys in kwargs that it uses internally:
+ :tools List[str]: A list of tool names to use
+    :version_arg str: The argument to pass to the tool to get its version
+ :returncode_value int: The value of the correct returncode
+ Because some tools are stupid and don't return 0
+ """
+
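+    # A minimal sketch of a concrete subclass (hypothetical tool name):
+    #
+    #   class FooConfigDependency(ConfigToolDependency):
+    #       tools = ['foo-config']
+    #       tool_name = 'foo-config'
+    #
+    # get_config_value(['--cflags'], 'compile_args') would then run
+    # 'foo-config --cflags' and split its stdout into arguments.
+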
+ tools: T.Optional[T.List[str]] = None
+ tool_name: T.Optional[str] = None
+ version_arg = '--version'
+ __strip_version = re.compile(r'^[0-9][0-9.]+')
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
+ super().__init__(DependencyTypeName('config-tool'), environment, kwargs, language=language)
+ self.name = name
+ # You may want to overwrite the class version in some cases
+ self.tools = listify(kwargs.get('tools', self.tools))
+ if not self.tool_name:
+ self.tool_name = self.tools[0]
+ if 'version_arg' in kwargs:
+ self.version_arg = kwargs['version_arg']
+
+ req_version_raw = kwargs.get('version', None)
+ if req_version_raw is not None:
+ req_version = mesonlib.stringlistify(req_version_raw)
+ else:
+ req_version = []
+ tool, version = self.find_config(req_version, kwargs.get('returncode_value', 0))
+ self.config = tool
+ self.is_found = self.report_config(version, req_version)
+ if not self.is_found:
+ self.config = None
+ return
+ self.version = version
+
+ def _sanitize_version(self, version: str) -> str:
+ """Remove any non-numeric, non-point version suffixes."""
+ m = self.__strip_version.match(version)
+ if m:
+ # Ensure that there isn't a trailing '.', such as an input like
+ # `1.2.3.git-1234`
+ return m.group(0).rstrip('.')
+ return version
+
+ def find_config(self, versions: T.List[str], returncode: int = 0) \
+ -> T.Tuple[T.Optional[T.List[str]], T.Optional[str]]:
+ """Helper method that searches for config tool binaries in PATH and
+ returns the one that best matches the given version requirements.
+ """
+ best_match: T.Tuple[T.Optional[T.List[str]], T.Optional[str]] = (None, None)
+ for potential_bin in find_external_program(
+ self.env, self.for_machine, self.tool_name,
+ self.tool_name, self.tools, allow_default_for_cross=False):
+ if not potential_bin.found():
+ continue
+ tool = potential_bin.get_command()
+ try:
+ p, out = Popen_safe(tool + [self.version_arg])[:2]
+ except (FileNotFoundError, PermissionError):
+ continue
+ if p.returncode != returncode:
+ continue
+
+ out = self._sanitize_version(out.strip())
+            # Some tools, like pcap-config, don't supply a version but also
+            # don't fail with --version; in that case just assume that there
+            # is only one version and return it.
+ if not out:
+ return (tool, None)
+ if versions:
+ is_found = version_compare_many(out, versions)[0]
+ # This allows returning a found version without a config tool,
+ # which is useful to inform the user that you found version x,
+ # but y was required.
+ if not is_found:
+ tool = None
+ if best_match[1]:
+ if version_compare(out, '> {}'.format(best_match[1])):
+ best_match = (tool, out)
+ else:
+ best_match = (tool, out)
+
+ return best_match
+
+ def report_config(self, version: T.Optional[str], req_version: T.List[str]) -> bool:
+ """Helper method to print messages about the tool."""
+
+ found_msg: T.List[T.Union[str, mlog.AnsiDecorator]] = [mlog.bold(self.tool_name), 'found:']
+
+ if self.config is None:
+ found_msg.append(mlog.red('NO'))
+ if version is not None and req_version:
+ found_msg.append(f'found {version!r} but need {req_version!r}')
+ elif req_version:
+ found_msg.append(f'need {req_version!r}')
+ else:
+ found_msg += [mlog.green('YES'), '({})'.format(' '.join(self.config)), version]
+
+ mlog.log(*found_msg)
+
+ return self.config is not None
+
+ def get_config_value(self, args: T.List[str], stage: str) -> T.List[str]:
+ p, out, err = Popen_safe(self.config + args)
+ if p.returncode != 0:
+ if self.required:
+ raise DependencyException(f'Could not generate {stage} for {self.name}.\n{err}')
+ return []
+ return split_args(out)
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.AUTO, DependencyMethods.CONFIG_TOOL]
+
+ def get_configtool_variable(self, variable_name: str) -> str:
+ p, out, _ = Popen_safe(self.config + [f'--{variable_name}'])
+ if p.returncode != 0:
+ if self.required:
+ raise DependencyException(
+ 'Could not get variable "{}" for dependency {}'.format(
+ variable_name, self.name))
+ variable = out.strip()
+ mlog.debug(f'Got config-tool variable {variable_name} : {variable}')
+ return variable
+
+ def log_tried(self) -> str:
+ return self.type_name
+
+ def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+ configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+ default_value: T.Optional[str] = None,
+ pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
+ if configtool:
+            # In the not-required case '' (empty string) will be returned if
+            # the variable is not found. Since '' is a valid value to return,
+            # we set required to True here to force an error, and use the
+            # finally clause to ensure it's restored.
+ restore = self.required
+ self.required = True
+ try:
+ return self.get_configtool_variable(configtool)
+ except DependencyException:
+ pass
+ finally:
+ self.required = restore
+ if default_value is not None:
+ return default_value
+ raise DependencyException(f'Could not get config-tool variable and no default provided for {self!r}')
diff --git a/meson/mesonbuild/dependencies/cuda.py b/meson/mesonbuild/dependencies/cuda.py
new file mode 100644
index 000000000..6bc0f05e3
--- /dev/null
+++ b/meson/mesonbuild/dependencies/cuda.py
@@ -0,0 +1,291 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import glob
+import re
+import os
+import typing as T
+from pathlib import Path
+
+from .. import mesonlib
+from .. import mlog
+from ..environment import detect_cpu_family
+from .base import DependencyException, SystemDependency
+
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+ from ..compilers import Compiler
+
+TV_ResultTuple = T.Tuple[T.Optional[str], T.Optional[str], bool]
+
+class CudaDependency(SystemDependency):
+
+ supported_languages = ['cuda', 'cpp', 'c'] # see also _default_language
+
+ def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+ compilers = environment.coredata.compilers[self.get_for_machine_from_kwargs(kwargs)]
+ language = self._detect_language(compilers)
+ if language not in self.supported_languages:
+ raise DependencyException(f'Language \'{language}\' is not supported by the CUDA Toolkit. Supported languages are {self.supported_languages}.')
+
+ super().__init__('cuda', environment, kwargs, language=language)
+ self.lib_modules: T.Dict[str, T.List[str]] = {}
+ self.requested_modules = self.get_requested(kwargs)
+ if 'cudart' not in self.requested_modules:
+ self.requested_modules = ['cudart'] + self.requested_modules
+
+ (self.cuda_path, self.version, self.is_found) = self._detect_cuda_path_and_version()
+ if not self.is_found:
+ return
+
+ if not os.path.isabs(self.cuda_path):
+ raise DependencyException(f'CUDA Toolkit path must be absolute, got \'{self.cuda_path}\'.')
+
+ # nvcc already knows where to find the CUDA Toolkit, but if we're compiling
+ # a mixed C/C++/CUDA project, we still need to make the include dir searchable
+ if self.language != 'cuda' or len(compilers) > 1:
+ self.incdir = os.path.join(self.cuda_path, 'include')
+ self.compile_args += [f'-I{self.incdir}']
+
+ if self.language != 'cuda':
+ arch_libdir = self._detect_arch_libdir()
+ self.libdir = os.path.join(self.cuda_path, arch_libdir)
+ mlog.debug('CUDA library directory is', mlog.bold(self.libdir))
+ else:
+ self.libdir = None
+
+ self.is_found = self._find_requested_libraries()
+
+ @classmethod
+ def _detect_language(cls, compilers: T.Dict[str, 'Compiler']) -> str:
+ for lang in cls.supported_languages:
+ if lang in compilers:
+ return lang
+ return list(compilers.keys())[0]
+
+ def _detect_cuda_path_and_version(self) -> TV_ResultTuple:
+ self.env_var = self._default_path_env_var()
+ mlog.debug('Default path env var:', mlog.bold(self.env_var))
+
+ version_reqs = self.version_reqs
+ if self.language == 'cuda':
+ nvcc_version = self._strip_patch_version(self.get_compiler().version)
+ mlog.debug('nvcc version:', mlog.bold(nvcc_version))
+ if version_reqs:
+ # make sure nvcc version satisfies specified version requirements
+ (found_some, not_found, found) = mesonlib.version_compare_many(nvcc_version, version_reqs)
+ if not_found:
+ msg = f'The current nvcc version {nvcc_version} does not satisfy the specified CUDA Toolkit version requirements {version_reqs}.'
+ return self._report_dependency_error(msg, (None, None, False))
+
+ # use nvcc version to find a matching CUDA Toolkit
+ version_reqs = [f'={nvcc_version}']
+ else:
+ nvcc_version = None
+
+ paths = [(path, self._cuda_toolkit_version(path), default) for (path, default) in self._cuda_paths()]
+ if version_reqs:
+ return self._find_matching_toolkit(paths, version_reqs, nvcc_version)
+
+ defaults = [(path, version) for (path, version, default) in paths if default]
+ if defaults:
+ return (defaults[0][0], defaults[0][1], True)
+
+ platform_msg = 'set the CUDA_PATH environment variable' if self._is_windows() \
+ else 'set the CUDA_PATH environment variable/create the \'/usr/local/cuda\' symbolic link'
+ msg = f'Please specify the desired CUDA Toolkit version (e.g. dependency(\'cuda\', version : \'>=10.1\')) or {platform_msg} to point to the location of your desired version.'
+ return self._report_dependency_error(msg, (None, None, False))
+
+ def _find_matching_toolkit(self, paths: T.List[TV_ResultTuple], version_reqs: T.List[str], nvcc_version: T.Optional[str]) -> TV_ResultTuple:
+        # keep the order of the default paths intact and sort the rest in
+        # descending order by toolkit version
+ part_func: T.Callable[[TV_ResultTuple], bool] = lambda t: not t[2]
+ defaults_it, rest_it = mesonlib.partition(part_func, paths)
+ defaults = list(defaults_it)
+ paths = defaults + sorted(rest_it, key=lambda t: mesonlib.Version(t[1]), reverse=True)
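+        # e.g. (illustrative) a default ('/usr/local/cuda', '10.1', True)
+        # stays first, while non-defaults like cuda-11.2 and cuda-10.2 follow
+        # in descending version order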
+ mlog.debug(f'Search paths: {paths}')
+
+ if nvcc_version and defaults:
+ default_src = f"the {self.env_var} environment variable" if self.env_var else "the \'/usr/local/cuda\' symbolic link"
+ nvcc_warning = 'The default CUDA Toolkit as designated by {} ({}) doesn\'t match the current nvcc version {} and will be ignored.'.format(default_src, os.path.realpath(defaults[0][0]), nvcc_version)
+ else:
+ nvcc_warning = None
+
+ for (path, version, default) in paths:
+ (found_some, not_found, found) = mesonlib.version_compare_many(version, version_reqs)
+ if not not_found:
+ if not default and nvcc_warning:
+ mlog.warning(nvcc_warning)
+ return (path, version, True)
+
+ if nvcc_warning:
+ mlog.warning(nvcc_warning)
+ return (None, None, False)
+
+ def _default_path_env_var(self) -> T.Optional[str]:
+ env_vars = ['CUDA_PATH'] if self._is_windows() else ['CUDA_PATH', 'CUDA_HOME', 'CUDA_ROOT']
+ env_vars = [var for var in env_vars if var in os.environ]
+ user_defaults = {os.environ[var] for var in env_vars}
+ if len(user_defaults) > 1:
+ mlog.warning('Environment variables {} point to conflicting toolkit locations ({}). Toolkit selection might produce unexpected results.'.format(', '.join(env_vars), ', '.join(user_defaults)))
+ return env_vars[0] if env_vars else None
+
+ def _cuda_paths(self) -> T.List[T.Tuple[str, bool]]:
+ return ([(os.environ[self.env_var], True)] if self.env_var else []) \
+ + (self._cuda_paths_win() if self._is_windows() else self._cuda_paths_nix())
+
+ def _cuda_paths_win(self) -> T.List[T.Tuple[str, bool]]:
+ env_vars = os.environ.keys()
+ return [(os.environ[var], False) for var in env_vars if var.startswith('CUDA_PATH_')]
+
+ def _cuda_paths_nix(self) -> T.List[T.Tuple[str, bool]]:
+ # include /usr/local/cuda default only if no env_var was found
+ pattern = '/usr/local/cuda-*' if self.env_var else '/usr/local/cuda*'
+ return [(path, os.path.basename(path) == 'cuda') for path in glob.iglob(pattern)]
+
+ toolkit_version_regex = re.compile(r'^CUDA Version\s+(.*)$')
+ path_version_win_regex = re.compile(r'^v(.*)$')
+ path_version_nix_regex = re.compile(r'^cuda-(.*)$')
+ cudart_version_regex = re.compile(r'#define\s+CUDART_VERSION\s+([0-9]+)')
+
+ def _cuda_toolkit_version(self, path: str) -> str:
+ version = self._read_toolkit_version_txt(path)
+ if version:
+ return version
+ version = self._read_cuda_runtime_api_version(path)
+ if version:
+ return version
+
+ mlog.debug('Falling back to extracting version from path')
+ path_version_regex = self.path_version_win_regex if self._is_windows() else self.path_version_nix_regex
+ try:
+ m = path_version_regex.match(os.path.basename(path))
+ if m:
+ return m.group(1)
+ else:
+ mlog.warning(f'Could not detect CUDA Toolkit version for {path}')
+ except Exception as e:
+ mlog.warning(f'Could not detect CUDA Toolkit version for {path}: {e!s}')
+
+ return '0.0'
+
+ def _read_cuda_runtime_api_version(self, path_str: str) -> T.Optional[str]:
+ path = Path(path_str)
+ for i in path.rglob('cuda_runtime_api.h'):
+ raw = i.read_text(encoding='utf-8')
+ m = self.cudart_version_regex.search(raw)
+ if not m:
+ continue
+ try:
+ vers_int = int(m.group(1))
+ except ValueError:
+ continue
+ # use // for floor instead of / which produces a float
+ major = vers_int // 1000 # type: int
+ minor = (vers_int - major * 1000) // 10 # type: int
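+            # e.g. CUDART_VERSION 10010 -> major 10, minor 1 -> '10.1'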
+ return f'{major}.{minor}'
+ return None
+
+ def _read_toolkit_version_txt(self, path: str) -> T.Optional[str]:
+        # Read 'version.txt' at the root of the CUDA Toolkit directory to determine the toolkit version
+ version_file_path = os.path.join(path, 'version.txt')
+ try:
+ with open(version_file_path, encoding='utf-8') as version_file:
+ version_str = version_file.readline() # e.g. 'CUDA Version 10.1.168'
+ m = self.toolkit_version_regex.match(version_str)
+ if m:
+ return self._strip_patch_version(m.group(1))
+ except Exception as e:
+ mlog.debug(f'Could not read CUDA Toolkit\'s version file {version_file_path}: {e!s}')
+
+ return None
+
+ @classmethod
+ def _strip_patch_version(cls, version: str) -> str:
+ return '.'.join(version.split('.')[:2])
+
+ def _detect_arch_libdir(self) -> str:
+ arch = detect_cpu_family(self.env.coredata.compilers.host)
+ machine = self.env.machines[self.for_machine]
+ msg = '{} architecture is not supported in {} version of the CUDA Toolkit.'
+ if machine.is_windows():
+ libdirs = {'x86': 'Win32', 'x86_64': 'x64'}
+ if arch not in libdirs:
+ raise DependencyException(msg.format(arch, 'Windows'))
+ return os.path.join('lib', libdirs[arch])
+ elif machine.is_linux():
+ libdirs = {'x86_64': 'lib64', 'ppc64': 'lib', 'aarch64': 'lib64', 'loongarch64': 'lib64'}
+ if arch not in libdirs:
+ raise DependencyException(msg.format(arch, 'Linux'))
+ return libdirs[arch]
+ elif machine.is_darwin():
+ libdirs = {'x86_64': 'lib64'}
+ if arch not in libdirs:
+ raise DependencyException(msg.format(arch, 'macOS'))
+ return libdirs[arch]
+ else:
+ raise DependencyException('CUDA Toolkit: unsupported platform.')
+
+ def _find_requested_libraries(self) -> bool:
+ all_found = True
+
+ for module in self.requested_modules:
+ args = self.clib_compiler.find_library(module, self.env, [self.libdir] if self.libdir else [])
+ if args is None:
+ self._report_dependency_error(f'Couldn\'t find requested CUDA module \'{module}\'')
+ all_found = False
+ else:
+ mlog.debug(f'Link args for CUDA module \'{module}\' are {args}')
+ self.lib_modules[module] = args
+
+ return all_found
+
+ def _is_windows(self) -> bool:
+ return self.env.machines[self.for_machine].is_windows()
+
+ @T.overload
+ def _report_dependency_error(self, msg: str) -> None: ...
+
+ @T.overload
+ def _report_dependency_error(self, msg: str, ret_val: TV_ResultTuple) -> TV_ResultTuple: ...
+
+ def _report_dependency_error(self, msg: str, ret_val: T.Optional[TV_ResultTuple] = None) -> T.Optional[TV_ResultTuple]:
+ if self.required:
+ raise DependencyException(msg)
+
+ mlog.debug(msg)
+ return ret_val
+
+ def log_details(self) -> str:
+ module_str = ', '.join(self.requested_modules)
+ return 'modules: ' + module_str
+
+ def log_info(self) -> str:
+ return self.cuda_path if self.cuda_path else ''
+
+ def get_requested(self, kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+ candidates = mesonlib.extract_as_list(kwargs, 'modules')
+ for c in candidates:
+ if not isinstance(c, str):
+ raise DependencyException('CUDA module argument is not a string.')
+ return candidates
+
+ def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
+ args = []
+ if self.libdir:
+ args += self.clib_compiler.get_linker_search_args(self.libdir)
+ for lib in self.requested_modules:
+ args += self.lib_modules[lib]
+ return args
diff --git a/meson/mesonbuild/dependencies/data/CMakeLists.txt b/meson/mesonbuild/dependencies/data/CMakeLists.txt
new file mode 100644
index 000000000..acbf64871
--- /dev/null
+++ b/meson/mesonbuild/dependencies/data/CMakeLists.txt
@@ -0,0 +1,98 @@
+# fail noisily if an attempt is made to use this file without setting:
+# cmake_minimum_required(VERSION ${CMAKE_VERSION})
+# project(... LANGUAGES ...)
+
+cmake_policy(SET CMP0000 NEW)
+
+set(PACKAGE_FOUND FALSE)
+set(_packageName "${NAME}")
+string(TOUPPER "${_packageName}" PACKAGE_NAME)
+
+while(TRUE)
+ if ("${VERSION}" STREQUAL "")
+ find_package("${NAME}" QUIET COMPONENTS ${COMPS})
+ else()
+ find_package("${NAME}" "${VERSION}" QUIET COMPONENTS ${COMPS})
+ endif()
+
+ # ARCHS has to be set via the CMD interface
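+  # (illustrative: ARCHS may arrive as "x86_64-linux-gnu;x86_64"; each
+  # iteration retries find_package() with the next entry as
+  # CMAKE_LIBRARY_ARCHITECTURE)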
+ if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "")
+ break()
+ endif()
+
+ list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+ list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND)
+ set(PACKAGE_FOUND TRUE)
+
+ # Check the following variables:
+ # FOO_VERSION
+ # Foo_VERSION
+ # FOO_VERSION_STRING
+ # Foo_VERSION_STRING
+ if(NOT DEFINED PACKAGE_VERSION)
+ if(DEFINED ${_packageName}_VERSION)
+ set(PACKAGE_VERSION "${${_packageName}_VERSION}")
+ elseif(DEFINED ${PACKAGE_NAME}_VERSION)
+ set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION}")
+ elseif(DEFINED ${_packageName}_VERSION_STRING)
+ set(PACKAGE_VERSION "${${_packageName}_VERSION_STRING}")
+ elseif(DEFINED ${PACKAGE_NAME}_VERSION_STRING)
+ set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION_STRING}")
+ endif()
+ endif()
+
+ # Check the following variables:
+ # FOO_LIBRARIES
+ # Foo_LIBRARIES
+ # FOO_LIBS
+ # Foo_LIBS
+ set(libs)
+ if(DEFINED ${_packageName}_LIBRARIES)
+ set(libs ${_packageName}_LIBRARIES)
+ elseif(DEFINED ${PACKAGE_NAME}_LIBRARIES)
+ set(libs ${PACKAGE_NAME}_LIBRARIES)
+ elseif(DEFINED ${_packageName}_LIBS)
+ set(libs ${_packageName}_LIBS)
+ elseif(DEFINED ${PACKAGE_NAME}_LIBS)
+ set(libs ${PACKAGE_NAME}_LIBS)
+ endif()
+
+ # Check the following variables:
+ # FOO_INCLUDE_DIRS
+ # Foo_INCLUDE_DIRS
+ # FOO_INCLUDES
+ # Foo_INCLUDES
+ # FOO_INCLUDE_DIR
+ # Foo_INCLUDE_DIR
+ set(includes)
+ if(DEFINED ${_packageName}_INCLUDE_DIRS)
+ set(includes ${_packageName}_INCLUDE_DIRS)
+ elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIRS)
+ set(includes ${PACKAGE_NAME}_INCLUDE_DIRS)
+ elseif(DEFINED ${_packageName}_INCLUDES)
+ set(includes ${_packageName}_INCLUDES)
+ elseif(DEFINED ${PACKAGE_NAME}_INCLUDES)
+ set(includes ${PACKAGE_NAME}_INCLUDES)
+ elseif(DEFINED ${_packageName}_INCLUDE_DIR)
+ set(includes ${_packageName}_INCLUDE_DIR)
+ elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIR)
+ set(includes ${PACKAGE_NAME}_INCLUDE_DIR)
+ endif()
+
+ # Check the following variables:
+ # FOO_DEFINITIONS
+ # Foo_DEFINITIONS
+ set(definitions)
+ if(DEFINED ${_packageName}_DEFINITIONS)
+ set(definitions ${_packageName}_DEFINITIONS)
+ elseif(DEFINED ${PACKAGE_NAME}_DEFINITIONS)
+ set(definitions ${PACKAGE_NAME}_DEFINITIONS)
+ endif()
+
+ set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+ set(PACKAGE_DEFINITIONS "${${definitions}}")
+ set(PACKAGE_LIBRARIES "${${libs}}")
+endif()
diff --git a/meson/mesonbuild/dependencies/data/CMakeListsLLVM.txt b/meson/mesonbuild/dependencies/data/CMakeListsLLVM.txt
new file mode 100644
index 000000000..9d3e41234
--- /dev/null
+++ b/meson/mesonbuild/dependencies/data/CMakeListsLLVM.txt
@@ -0,0 +1,95 @@
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION})
+
+set(PACKAGE_FOUND FALSE)
+
+while(TRUE)
+ find_package(LLVM REQUIRED CONFIG QUIET)
+
+ # ARCHS has to be set via the CMD interface
+ if(LLVM_FOUND OR "${ARCHS}" STREQUAL "")
+ break()
+ endif()
+
+ list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+ list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(LLVM_FOUND)
+ set(PACKAGE_FOUND TRUE)
+
+ foreach(mod IN LISTS LLVM_MESON_MODULES)
+ # Reset variables
+ set(out_mods)
+ set(real_mods)
+
+ # Generate a lower and upper case version
+ string(TOLOWER "${mod}" mod_L)
+ string(TOUPPER "${mod}" mod_U)
+
+ # Get the mapped components
+ llvm_map_components_to_libnames(out_mods ${mod} ${mod_L} ${mod_U})
+ list(SORT out_mods)
+ list(REMOVE_DUPLICATES out_mods)
+
+ # Make sure that the modules exist
+ foreach(i IN LISTS out_mods)
+ if(TARGET ${i})
+ list(APPEND real_mods ${i})
+ endif()
+ endforeach()
+
+ # Set the output variables
+ set(MESON_LLVM_TARGETS_${mod} ${real_mods})
+ foreach(i IN LISTS real_mods)
+ set(MESON_TARGET_TO_LLVM_${i} ${mod})
+ endforeach()
+ endforeach()
+
+ # Check the following variables:
+ # LLVM_PACKAGE_VERSION
+ # LLVM_VERSION
+ # LLVM_VERSION_STRING
+ if(NOT DEFINED PACKAGE_VERSION)
+ if(DEFINED LLVM_PACKAGE_VERSION)
+ set(PACKAGE_VERSION "${LLVM_PACKAGE_VERSION}")
+ elseif(DEFINED LLVM_VERSION)
+ set(PACKAGE_VERSION "${LLVM_VERSION}")
+ elseif(DEFINED LLVM_VERSION_STRING)
+ set(PACKAGE_VERSION "${LLVM_VERSION_STRING}")
+ endif()
+ endif()
+
+ # Check the following variables:
+ # LLVM_LIBRARIES
+ # LLVM_LIBS
+ set(libs)
+ if(DEFINED LLVM_LIBRARIES)
+ set(libs LLVM_LIBRARIES)
+ elseif(DEFINED LLVM_LIBS)
+ set(libs LLVM_LIBS)
+ endif()
+
+ # Check the following variables:
+ # LLVM_INCLUDE_DIRS
+ # LLVM_INCLUDES
+ # LLVM_INCLUDE_DIR
+ set(includes)
+ if(DEFINED LLVM_INCLUDE_DIRS)
+ set(includes LLVM_INCLUDE_DIRS)
+ elseif(DEFINED LLVM_INCLUDES)
+ set(includes LLVM_INCLUDES)
+ elseif(DEFINED LLVM_INCLUDE_DIR)
+ set(includes LLVM_INCLUDE_DIR)
+ endif()
+
+ # Check the following variables:
+ # LLVM_DEFINITIONS
+ set(definitions)
+ if(DEFINED LLVM_DEFINITIONS)
+ set(definitions LLVM_DEFINITIONS)
+ endif()
+
+ set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+ set(PACKAGE_DEFINITIONS "${${definitions}}")
+ set(PACKAGE_LIBRARIES "${${libs}}")
+endif()
diff --git a/meson/mesonbuild/dependencies/data/CMakePathInfo.txt b/meson/mesonbuild/dependencies/data/CMakePathInfo.txt
new file mode 100644
index 000000000..662ec5836
--- /dev/null
+++ b/meson/mesonbuild/dependencies/data/CMakePathInfo.txt
@@ -0,0 +1,31 @@
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION})
+
+set(TMP_PATHS_LIST)
+list(APPEND TMP_PATHS_LIST ${CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH})
+
+set(LIB_ARCH_LIST)
+if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
+ file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
+ foreach(dir ${implicit_dirs})
+ if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
+ list(APPEND LIB_ARCH_LIST "${dir}")
+ endif()
+ endforeach()
+endif()
+
+# "Export" these variables:
+set(MESON_ARCH_LIST ${LIB_ARCH_LIST})
+set(MESON_PATHS_LIST ${TMP_PATHS_LIST})
+set(MESON_CMAKE_ROOT ${CMAKE_ROOT})
+set(MESON_CMAKE_SYSROOT ${CMAKE_SYSROOT})
+set(MESON_FIND_ROOT_PATH ${CMAKE_FIND_ROOT_PATH})
+
+message(STATUS ${TMP_PATHS_LIST})
diff --git a/meson/mesonbuild/dependencies/detect.py b/meson/mesonbuild/dependencies/detect.py
new file mode 100644
index 000000000..c6865d54c
--- /dev/null
+++ b/meson/mesonbuild/dependencies/detect.py
@@ -0,0 +1,226 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import Dependency, ExternalDependency, DependencyException, DependencyMethods, NotFoundDependency
+from .cmake import CMakeDependency
+from .dub import DubDependency
+from .framework import ExtraFrameworkDependency
+from .pkgconfig import PkgConfigDependency
+
+from ..mesonlib import listify, MachineChoice, PerMachine
+from .. import mlog
+import functools
+import typing as T
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+ from .factory import DependencyFactory, WrappedFactoryFunc, DependencyGenerator
+
+# These must be defined in this file to avoid cyclical references.
+packages: T.Dict[
+ str,
+ T.Union[T.Type[ExternalDependency], 'DependencyFactory', 'WrappedFactoryFunc']
+] = {}
+_packages_accept_language: T.Set[str] = set()
+
+if T.TYPE_CHECKING:
+ TV_DepIDEntry = T.Union[str, bool, int, T.Tuple[str, ...]]
+ TV_DepID = T.Tuple[T.Tuple[str, TV_DepIDEntry], ...]
+
+
+def get_dep_identifier(name: str, kwargs: T.Dict[str, T.Any]) -> 'TV_DepID':
+ identifier: 'TV_DepID' = (('name', name), )
+ from ..interpreter import permitted_dependency_kwargs
+ assert len(permitted_dependency_kwargs) == 19, \
+ 'Extra kwargs have been added to dependency(), please review if it makes sense to handle it here'
+ for key, value in kwargs.items():
+ # 'version' is irrelevant for caching; the caller must check version matches
+ # 'native' is handled above with `for_machine`
+ # 'required' is irrelevant for caching; the caller handles it separately
+ # 'fallback' and 'allow_fallback' is not part of the cache because,
+ # once a dependency has been found through a fallback, it should
+ # be used for the rest of the Meson run.
+ # 'default_options' is only used in fallback case
+ # 'not_found_message' has no impact on the dependency lookup
+ # 'include_type' is handled after the dependency lookup
+ if key in ('version', 'native', 'required', 'fallback', 'allow_fallback', 'default_options',
+ 'not_found_message', 'include_type'):
+ continue
+        # All keyword arguments are strings, booleans, ints, or lists (or lists of lists)
+ if isinstance(value, list):
+ value = frozenset(listify(value))
+ for i in value:
+ assert isinstance(i, str)
+ else:
+ assert isinstance(value, (str, bool, int))
+        identifier += ((key, value),)
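+    # e.g. (illustrative) dependency('foo', modules : ['bar']) yields
+    #   (('name', 'foo'), ('modules', frozenset({'bar'})))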
+ return identifier
+
+display_name_map = {
+ 'boost': 'Boost',
+ 'cuda': 'CUDA',
+ 'dub': 'DUB',
+ 'gmock': 'GMock',
+ 'gtest': 'GTest',
+ 'hdf5': 'HDF5',
+ 'llvm': 'LLVM',
+ 'mpi': 'MPI',
+ 'netcdf': 'NetCDF',
+ 'openmp': 'OpenMP',
+ 'wxwidgets': 'WxWidgets',
+}
+
+def find_external_dependency(name: str, env: 'Environment', kwargs: T.Dict[str, object]) -> T.Union['ExternalDependency', NotFoundDependency]:
+ assert(name)
+ required = kwargs.get('required', True)
+ if not isinstance(required, bool):
+ raise DependencyException('Keyword "required" must be a boolean.')
+ if not isinstance(kwargs.get('method', ''), str):
+ raise DependencyException('Keyword "method" must be a string.')
+ lname = name.lower()
+ if lname not in _packages_accept_language and 'language' in kwargs:
+ raise DependencyException(f'{name} dependency does not accept "language" keyword argument')
+ if not isinstance(kwargs.get('version', ''), (str, list)):
+        raise DependencyException('Keyword "version" must be string or list.')
+
+ # display the dependency name with correct casing
+ display_name = display_name_map.get(lname, lname)
+
+ for_machine = MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST
+
+ type_text = PerMachine('Build-time', 'Run-time')[for_machine] + ' dependency'
+
+ # build a list of dependency methods to try
+ candidates = _build_external_dependency_list(name, env, for_machine, kwargs)
+
+    pkg_exc: T.List[T.Optional[DependencyException]] = []
+ pkgdep: T.List[ExternalDependency] = []
+ details = ''
+
+ for c in candidates:
+ # try this dependency method
+ try:
+ d = c()
+ d._check_version()
+ pkgdep.append(d)
+ except DependencyException as e:
+ pkg_exc.append(e)
+ mlog.debug(str(e))
+ else:
+ pkg_exc.append(None)
+ details = d.log_details()
+ if details:
+ details = '(' + details + ') '
+ if 'language' in kwargs:
+ details += 'for ' + d.language + ' '
+
+ # if the dependency was found
+ if d.found():
+
+ info: mlog.TV_LoggableList = []
+ if d.version:
+ info.append(mlog.normal_cyan(d.version))
+
+ log_info = d.log_info()
+ if log_info:
+ info.append('(' + log_info + ')')
+
+ mlog.log(type_text, mlog.bold(display_name), details + 'found:', mlog.green('YES'), *info)
+
+ return d
+
+ # otherwise, the dependency could not be found
+ tried_methods = [d.log_tried() for d in pkgdep if d.log_tried()]
+    tried = mlog.format_list(tried_methods) if tried_methods else ''
+
+ mlog.log(type_text, mlog.bold(display_name), details + 'found:', mlog.red('NO'),
+ f'(tried {tried})' if tried else '')
+
+ if required:
+ # if an exception occurred with the first detection method, re-raise it
+ # (on the grounds that it came from the preferred dependency detection
+ # method)
+ if pkg_exc and pkg_exc[0]:
+ raise pkg_exc[0]
+
+ # we have a list of failed ExternalDependency objects, so we can report
+ # the methods we tried to find the dependency
+        raise DependencyException(f'Dependency "{name}" not found' +
+                                  (f', tried {tried}' if tried else ''))
+
+ return NotFoundDependency(env)
+
+
+def _build_external_dependency_list(name: str, env: 'Environment', for_machine: MachineChoice,
+ kwargs: T.Dict[str, T.Any]) -> T.List['DependencyGenerator']:
+ # First check if the method is valid
+ if 'method' in kwargs and kwargs['method'] not in [e.value for e in DependencyMethods]:
+ raise DependencyException('method {!r} is invalid'.format(kwargs['method']))
+
+ # Is there a specific dependency detector for this dependency?
+ lname = name.lower()
+ if lname in packages:
+ # Create the list of dependency object constructors using a factory
+ # class method, if one exists, otherwise the list just consists of the
+ # constructor
+ if isinstance(packages[lname], type):
+ entry1 = T.cast(T.Type[ExternalDependency], packages[lname]) # mypy doesn't understand isinstance(..., type)
+ if issubclass(entry1, ExternalDependency):
+ # TODO: somehow make mypy understand that entry1(env, kwargs) is OK...
+ func: T.Callable[[], 'ExternalDependency'] = lambda: entry1(env, kwargs) # type: ignore
+ dep = [func]
+ else:
+ entry2 = T.cast(T.Union['DependencyFactory', 'WrappedFactoryFunc'], packages[lname])
+ dep = entry2(env, for_machine, kwargs)
+ return dep
+
+ candidates: T.List['DependencyGenerator'] = []
+
+ # If it's explicitly requested, use the dub detection method (only)
+ if 'dub' == kwargs.get('method', ''):
+ candidates.append(functools.partial(DubDependency, name, env, kwargs))
+ return candidates
+
+ # If it's explicitly requested, use the pkgconfig detection method (only)
+ if 'pkg-config' == kwargs.get('method', ''):
+ candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs))
+ return candidates
+
+ # If it's explicitly requested, use the CMake detection method (only)
+ if 'cmake' == kwargs.get('method', ''):
+ candidates.append(functools.partial(CMakeDependency, name, env, kwargs))
+ return candidates
+
+ # If it's explicitly requested, use the Extraframework detection method (only)
+ if 'extraframework' == kwargs.get('method', ''):
+ # On OSX, also try framework dependency detector
+ if env.machines[for_machine].is_darwin():
+ candidates.append(functools.partial(ExtraFrameworkDependency, name, env, kwargs))
+ return candidates
+
+ # Otherwise, just use the pkgconfig and cmake dependency detector
+ if 'auto' == kwargs.get('method', 'auto'):
+ candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs))
+
+ # On OSX, also try framework dependency detector
+ if env.machines[for_machine].is_darwin():
+ candidates.append(functools.partial(ExtraFrameworkDependency, name, env, kwargs))
+
+ # Only use CMake as a last resort, since it might not work 100% (see #6113)
+ candidates.append(functools.partial(CMakeDependency, name, env, kwargs))
+
+ return candidates
diff --git a/meson/mesonbuild/dependencies/dev.py b/meson/mesonbuild/dependencies/dev.py
new file mode 100644
index 000000000..7300e2fe7
--- /dev/null
+++ b/meson/mesonbuild/dependencies/dev.py
@@ -0,0 +1,595 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies useful for
+# development purposes, such as testing, debugging, etc.
+
+import glob
+import os
+import re
+import pathlib
+import shutil
+import typing as T
+
+from .. import mesonlib, mlog
+from ..compilers import AppleClangCCompiler, AppleClangCPPCompiler, detect_compiler_for
+from ..environment import get_llvm_tool_names
+from ..mesonlib import version_compare, stringlistify, extract_as_list, MachineChoice
+from .base import DependencyException, DependencyMethods, strip_system_libdirs, SystemDependency
+from .cmake import CMakeDependency
+from .configtool import ConfigToolDependency
+from .factory import DependencyFactory
+from .misc import threads_factory
+from .pkgconfig import PkgConfigDependency
+
+if T.TYPE_CHECKING:
+ from ..envconfig import MachineInfo
+    from ..environment import Environment
+
+
+def get_shared_library_suffix(environment: 'Environment', for_machine: MachineChoice) -> str:
+ """This is only guaranteed to work for languages that compile to machine
+ code, not for languages like C# that use a bytecode and always end in .dll
+ """
+ m = environment.machines[for_machine]
+ if m.is_windows():
+ return '.dll'
+ elif m.is_darwin():
+ return '.dylib'
+ return '.so'
+
+
+class GTestDependencySystem(SystemDependency):
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+ super().__init__(name, environment, kwargs, language='cpp')
+ self.main = kwargs.get('main', False)
+ self.src_dirs = ['/usr/src/gtest/src', '/usr/src/googletest/googletest/src']
+ if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+ self.is_found = False
+ return
+ self.detect()
+
+ def detect(self) -> None:
+ gtest_detect = self.clib_compiler.find_library("gtest", self.env, [])
+ gtest_main_detect = self.clib_compiler.find_library("gtest_main", self.env, [])
+ if gtest_detect and (not self.main or gtest_main_detect):
+ self.is_found = True
+ self.compile_args = []
+ self.link_args = gtest_detect
+ if self.main:
+ self.link_args += gtest_main_detect
+ self.sources = []
+ self.prebuilt = True
+ elif self.detect_srcdir():
+ self.is_found = True
+ self.compile_args = ['-I' + d for d in self.src_include_dirs]
+ self.link_args = []
+ if self.main:
+ self.sources = [self.all_src, self.main_src]
+ else:
+ self.sources = [self.all_src]
+ self.prebuilt = False
+ else:
+ self.is_found = False
+
+ def detect_srcdir(self) -> bool:
+ for s in self.src_dirs:
+ if os.path.exists(s):
+ self.src_dir = s
+ self.all_src = mesonlib.File.from_absolute_file(
+ os.path.join(self.src_dir, 'gtest-all.cc'))
+ self.main_src = mesonlib.File.from_absolute_file(
+ os.path.join(self.src_dir, 'gtest_main.cc'))
+ self.src_include_dirs = [os.path.normpath(os.path.join(self.src_dir, '..')),
+ os.path.normpath(os.path.join(self.src_dir, '../include')),
+ ]
+ return True
+ return False
+
+ def log_info(self) -> str:
+ if self.prebuilt:
+ return 'prebuilt'
+ else:
+ return 'building self'
+
+ def log_tried(self) -> str:
+ return 'system'
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]
+
+
+class GTestDependencyPC(PkgConfigDependency):
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ assert name == 'gtest'
+ if kwargs.get('main'):
+ name = 'gtest_main'
+ super().__init__(name, environment, kwargs)
+
+
+class GMockDependencySystem(SystemDependency):
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+ super().__init__(name, environment, kwargs, language='cpp')
+ self.main = kwargs.get('main', False)
+ if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+ self.is_found = False
+ return
+
+ # If we are getting main() from GMock, we definitely
+ # want to avoid linking in main() from GTest
+ gtest_kwargs = kwargs.copy()
+ if self.main:
+ gtest_kwargs['main'] = False
+
+ # GMock without GTest is pretty much useless
+ # this also mimics the structure given in WrapDB,
+ # where GMock always pulls in GTest
+ found = self._add_sub_dependency(gtest_factory(environment, self.for_machine, gtest_kwargs))
+ if not found:
+ self.is_found = False
+ return
+
+ # GMock may be a library or just source.
+ # Work with both.
+ gmock_detect = self.clib_compiler.find_library("gmock", self.env, [])
+ gmock_main_detect = self.clib_compiler.find_library("gmock_main", self.env, [])
+ if gmock_detect and (not self.main or gmock_main_detect):
+ self.is_found = True
+ self.link_args += gmock_detect
+ if self.main:
+ self.link_args += gmock_main_detect
+ self.prebuilt = True
+ return
+
+ for d in ['/usr/src/googletest/googlemock/src', '/usr/src/gmock/src', '/usr/src/gmock']:
+ if os.path.exists(d):
+ self.is_found = True
+ # Yes, we need both because there are multiple
+ # versions of gmock that do different things.
+ d2 = os.path.normpath(os.path.join(d, '..'))
+ self.compile_args += ['-I' + d, '-I' + d2, '-I' + os.path.join(d2, 'include')]
+ all_src = mesonlib.File.from_absolute_file(os.path.join(d, 'gmock-all.cc'))
+ main_src = mesonlib.File.from_absolute_file(os.path.join(d, 'gmock_main.cc'))
+ if self.main:
+ self.sources += [all_src, main_src]
+ else:
+ self.sources += [all_src]
+ self.prebuilt = False
+ return
+
+ self.is_found = False
+
+ def log_info(self) -> str:
+ if self.prebuilt:
+ return 'prebuilt'
+ else:
+ return 'building self'
+
+ def log_tried(self) -> str:
+ return 'system'
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]
+
+
+class GMockDependencyPC(PkgConfigDependency):
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ assert name == 'gmock'
+ if kwargs.get('main'):
+ name = 'gmock_main'
+ super().__init__(name, environment, kwargs)
+
+
+class LLVMDependencyConfigTool(ConfigToolDependency):
+ """
+    LLVM uses a special tool, llvm-config, which has arguments for getting
+    C args, C++ args, and linker args, as well as the version.
+ """
+ tool_name = 'llvm-config'
+ __cpp_blacklist = {'-DNDEBUG'}
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ self.tools = get_llvm_tool_names('llvm-config')
+
+ # Fedora starting with Fedora 30 adds a suffix of the number
+ # of bits in the isa that llvm targets, for example, on x86_64
+ # and aarch64 the name will be llvm-config-64, on x86 and arm
+ # it will be llvm-config-32.
+ if environment.machines[self.get_for_machine_from_kwargs(kwargs)].is_64_bit:
+ self.tools.append('llvm-config-64')
+ else:
+ self.tools.append('llvm-config-32')
+
+ # It's necessary for LLVM <= 3.8 to use the C++ linker. For 3.9 and 4.0
+ # the C linker works fine if only using the C API.
+ super().__init__(name, environment, kwargs, language='cpp')
+ self.provided_modules: T.List[str] = []
+ self.required_modules: mesonlib.OrderedSet[str] = mesonlib.OrderedSet()
+ self.module_details: T.List[str] = []
+ if not self.is_found:
+ return
+
+ self.provided_modules = self.get_config_value(['--components'], 'modules')
+ modules = stringlistify(extract_as_list(kwargs, 'modules'))
+ self.check_components(modules)
+ opt_modules = stringlistify(extract_as_list(kwargs, 'optional_modules'))
+ self.check_components(opt_modules, required=False)
+
+ cargs = mesonlib.OrderedSet(self.get_config_value(['--cppflags'], 'compile_args'))
+ self.compile_args = list(cargs.difference(self.__cpp_blacklist))
+
+ if version_compare(self.version, '>= 3.9'):
+ self._set_new_link_args(environment)
+ else:
+ self._set_old_link_args()
+ self.link_args = strip_system_libdirs(environment, self.for_machine, self.link_args)
+ self.link_args = self.__fix_bogus_link_args(self.link_args)
+ if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+ self.is_found = False
+ return
+
+ def __fix_bogus_link_args(self, args: T.List[str]) -> T.List[str]:
+ """This function attempts to fix bogus link arguments that llvm-config
+ generates.
+
+ Currently it works around the following:
+ - FreeBSD: when statically linking -l/usr/lib/libexecinfo.so will
+ be generated, strip the -l in cases like this.
+ - Windows: We may get -LIBPATH:... which is later interpreted as
+        "-L IBPATH:...". If we're using an msvc-like compiler, convert
+        that to "/LIBPATH", otherwise to "-L ...".
+ """
+
+ new_args = []
+ for arg in args:
+            if arg.startswith('-l') and arg.endswith('.so'):
+                # slice the prefix off; lstrip() would strip a character set
+                new_args.append(arg[len('-l'):])
+            elif arg.startswith('-LIBPATH:'):
+                cpp = self.env.coredata.compilers[self.for_machine]['cpp']
+                new_args.extend(cpp.get_linker_search_args(arg[len('-LIBPATH:'):]))
+ else:
+ new_args.append(arg)
+ return new_args
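+    # Illustrative examples of the rewrites above (paths are hypothetical):
+    #   '-l/usr/lib/libexecinfo.so'  ->  '/usr/lib/libexecinfo.so'
+    #   '-LIBPATH:C:/llvm/lib'       ->  '/LIBPATH:C:/llvm/lib' (msvc-like)
+    #                                    or '-LC:/llvm/lib' (otherwise)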
+
+ def __check_libfiles(self, shared: bool) -> None:
+ """Use llvm-config's --libfiles to check if libraries exist."""
+ mode = '--link-shared' if shared else '--link-static'
+
+ # Set self.required to true to force an exception in get_config_value
+ # if the returncode != 0
+ restore = self.required
+ self.required = True
+
+ try:
+ # It doesn't matter what the stage is, the caller needs to catch
+ # the exception anyway.
+ self.link_args = self.get_config_value(['--libfiles', mode], '')
+ finally:
+ self.required = restore
+
+ def _set_new_link_args(self, environment: 'Environment') -> None:
+ """How to set linker args for LLVM versions >= 3.9"""
+ try:
+ mode = self.get_config_value(['--shared-mode'], 'link_args')[0]
+ except IndexError:
+ mlog.debug('llvm-config --shared-mode returned an error')
+ self.is_found = False
+ return
+
+ if not self.static and mode == 'static':
+ # If llvm is configured with LLVM_BUILD_LLVM_DYLIB but not with
+ # LLVM_LINK_LLVM_DYLIB and not LLVM_BUILD_SHARED_LIBS (which
+ # upstream doesn't recommend using), then llvm-config will lie to
+            # you about how to do shared-linking. It wants to link to a bunch
+            # of individual shared libs (which don't exist because llvm wasn't
+            # built with LLVM_BUILD_SHARED_LIBS).
+ #
+            # Therefore, we'll try to get the libfiles; if that fails (the
+            # return code is non-zero), we'll try to build a working
+            # configuration by hand.
+ try:
+ self.__check_libfiles(True)
+ except DependencyException:
+ lib_ext = get_shared_library_suffix(environment, self.for_machine)
+ libdir = self.get_config_value(['--libdir'], 'link_args')[0]
+ # Sort for reproducibility
+ matches = sorted(glob.iglob(os.path.join(libdir, f'libLLVM*{lib_ext}')))
+ if not matches:
+ if self.required:
+ raise
+ self.is_found = False
+ return
+
+ self.link_args = self.get_config_value(['--ldflags'], 'link_args')
+                # slice off the 'lib' prefix and the shared library suffix;
+                # (l|r)strip() would strip character sets, not affixes
+                libname = os.path.basename(matches[0])[len('lib'):-len(lib_ext)]
+ self.link_args.append(f'-l{libname}')
+ return
+ elif self.static and mode == 'shared':
+            # If, however, LLVM_BUILD_SHARED_LIBS is true (*cough* gentoo *cough*),
+            # then this is correct. Building with LLVM_BUILD_SHARED_LIBS has a side
+            # effect: it stops the generation of static archives. Therefore we need
+            # to check for that and error out on static if this is the case.
+ try:
+ self.__check_libfiles(False)
+ except DependencyException:
+ if self.required:
+ raise
+ self.is_found = False
+ return
+
+ link_args = ['--link-static', '--system-libs'] if self.static else ['--link-shared']
+ self.link_args = self.get_config_value(
+ ['--libs', '--ldflags'] + link_args + list(self.required_modules),
+ 'link_args')
+
+ def _set_old_link_args(self) -> None:
+ """Setting linker args for older versions of llvm.
+
+ Old versions of LLVM bring an extra level of insanity with them.
+ llvm-config will provide the correct arguments for static linking, but
+        not for shared linking; we have to figure those out ourselves, because
+ of course we do.
+ """
+ if self.static:
+ self.link_args = self.get_config_value(
+ ['--libs', '--ldflags', '--system-libs'] + list(self.required_modules),
+ 'link_args')
+ else:
+            # llvm-config will only provide arguments for static linking, so we
+            # get to figure out for ourselves what to link with. We'll do that by
+ # checking in the directory provided by --libdir for a library
+ # called libLLVM-<ver>.(so|dylib|dll)
+ libdir = self.get_config_value(['--libdir'], 'link_args')[0]
+
+ expected_name = f'libLLVM-{self.version}'
+            re_name = re.compile(fr'{expected_name}\.(so|dll|dylib)$')
+
+ for file_ in os.listdir(libdir):
+ if re_name.match(file_):
+ self.link_args = [f'-L{libdir}',
+                                      '-l{}'.format(os.path.splitext(file_[len('lib'):])[0])]
+ break
+ else:
+ raise DependencyException(
+ 'Could not find a dynamically linkable library for LLVM.')
+
+ def check_components(self, modules: T.List[str], required: bool = True) -> None:
+ """Check for llvm components (modules in meson terms).
+
+ The required option is whether the module is required, not whether LLVM
+ is required.
+ """
+ for mod in sorted(set(modules)):
+ status = ''
+
+ if mod not in self.provided_modules:
+ if required:
+ self.is_found = False
+ if self.required:
+ raise DependencyException(
+ f'Could not find required LLVM Component: {mod}')
+ status = '(missing)'
+ else:
+ status = '(missing but optional)'
+ else:
+ self.required_modules.add(mod)
+
+ self.module_details.append(mod + status)
+
+ def log_details(self) -> str:
+ if self.module_details:
+ return 'modules: ' + ', '.join(self.module_details)
+ return ''
+
+
+class LLVMDependencyCMake(CMakeDependency):
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+ self.llvm_modules = stringlistify(extract_as_list(kwargs, 'modules'))
+ self.llvm_opt_modules = stringlistify(extract_as_list(kwargs, 'optional_modules'))
+ super().__init__(name, env, kwargs, language='cpp')
+
+        # CMake will always create a statically linked binary, so don't use
+        # CMake if dynamic linking is required
+ if not self.static:
+ self.is_found = False
+ mlog.warning('Ignoring LLVM CMake dependency because dynamic was requested')
+ return
+
+ if self.traceparser is None:
+ return
+
+ # Extract extra include directories and definitions
+ inc_dirs = self.traceparser.get_cmake_var('PACKAGE_INCLUDE_DIRS')
+ defs = self.traceparser.get_cmake_var('PACKAGE_DEFINITIONS')
+ # LLVM explicitly uses space-separated variables rather than semicolon lists
+ if len(defs) == 1:
+ defs = defs[0].split(' ')
+ temp = ['-I' + x for x in inc_dirs] + defs
+ self.compile_args += [x for x in temp if x not in self.compile_args]
+ if not self._add_sub_dependency(threads_factory(env, self.for_machine, {})):
+ self.is_found = False
+ return
+
+ def _main_cmake_file(self) -> str:
+ # Use a custom CMakeLists.txt for LLVM
+ return 'CMakeListsLLVM.txt'
+
+ def _extra_cmake_opts(self) -> T.List[str]:
+ return ['-DLLVM_MESON_MODULES={}'.format(';'.join(self.llvm_modules + self.llvm_opt_modules))]
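+    # For example (hypothetical values), modules=['core'] and
+    # optional_modules=['clang'] yield ['-DLLVM_MESON_MODULES=core;clang'].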
+
+ def _map_module_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
+ res = []
+ for mod, required in modules:
+ cm_targets = self.traceparser.get_cmake_var(f'MESON_LLVM_TARGETS_{mod}')
+ if not cm_targets:
+ if required:
+ raise self._gen_exception(f'LLVM module {mod} was not found')
+ else:
+ mlog.warning('Optional LLVM module', mlog.bold(mod), 'was not found')
+ continue
+ for i in cm_targets:
+ res += [(i, required)]
+ return res
+
+ def _original_module_name(self, module: str) -> str:
+ orig_name = self.traceparser.get_cmake_var(f'MESON_TARGET_TO_LLVM_{module}')
+ if orig_name:
+ return orig_name[0]
+ return module
+
+
+class ValgrindDependency(PkgConfigDependency):
+ '''
+ Consumers of Valgrind usually only need the compile args and do not want to
+ link to its (static) libraries.
+ '''
+ def __init__(self, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__('valgrind', env, kwargs)
+
+ def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
+ return []
+
+
+class ZlibSystemDependency(SystemDependency):
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+
+ m = self.env.machines[self.for_machine]
+
+ # I'm not sure this is entirely correct. What if we're cross compiling
+ # from something to macOS?
+ if ((m.is_darwin() and isinstance(self.clib_compiler, (AppleClangCCompiler, AppleClangCPPCompiler))) or
+ m.is_freebsd() or m.is_dragonflybsd()):
+            # No need to set includes;
+            # on macOS, xcode/clang will do that for us,
+            # and on FreeBSD zlib.h is in /usr/include.
+
+ self.is_found = True
+ self.link_args = ['-lz']
+ elif m.is_windows():
+            # Without a clib_compiler we can't find zlib, so just give up.
+ if self.clib_compiler is None:
+ self.is_found = False
+ return
+
+ if self.clib_compiler.get_argument_syntax() == 'msvc':
+                libs = ['zlib1', 'zlib']
+ else:
+ libs = ['z']
+ for lib in libs:
+ l = self.clib_compiler.find_library(lib, environment, [])
+ h = self.clib_compiler.has_header('zlib.h', '', environment, dependencies=[self])
+ if l and h[0]:
+ self.is_found = True
+ self.link_args = l
+ break
+ else:
+ return
+ else:
+ mlog.debug(f'Unsupported OS {m.system}')
+ return
+
+ v, _ = self.clib_compiler.get_define('ZLIB_VERSION', '#include <zlib.h>', self.env, [], [self])
+ self.version = v.strip('"')
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.SYSTEM]
+
+
+class JDKSystemDependency(SystemDependency):
+ def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__('jdk', environment, kwargs)
+
+ m = self.env.machines[self.for_machine]
+
+ if 'java' not in environment.coredata.compilers[self.for_machine]:
+ detect_compiler_for(environment, 'java', self.for_machine)
+ self.javac = environment.coredata.compilers[self.for_machine]['java']
+ self.version = self.javac.version
+
+ if 'version' in kwargs and not version_compare(self.version, kwargs['version']):
+ mlog.error(f'Incorrect JDK version found ({self.version}), wanted {kwargs["version"]}')
+ self.is_found = False
+ return
+
+ self.java_home = environment.properties[self.for_machine].get_java_home()
+ if not self.java_home:
+ self.java_home = pathlib.Path(shutil.which(self.javac.exelist[0])).resolve().parents[1]
+
+ platform_include_dir = self.__machine_info_to_platform_include_dir(m)
+ if platform_include_dir is None:
+ mlog.error("Could not find a JDK platform include directory for your OS, please open an issue or provide a pull request.")
+ self.is_found = False
+ return
+
+ java_home_include = self.java_home / 'include'
+ self.compile_args.append(f'-I{java_home_include}')
+ self.compile_args.append(f'-I{java_home_include / platform_include_dir}')
+ self.is_found = True
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.SYSTEM]
+
+ @staticmethod
+ def __machine_info_to_platform_include_dir(m: 'MachineInfo') -> T.Optional[str]:
+ """Translates the machine information to the platform-dependent include directory
+
+ When inspecting a JDK release tarball or $JAVA_HOME, inside the `include/` directory is a
+ platform dependent folder that must be on the target's include path in addition to the
+ parent `include/` directory.
+ """
+ if m.is_linux():
+ return 'linux'
+ elif m.is_windows():
+ return 'win32'
+ elif m.is_darwin():
+ return 'darwin'
+
+ return None
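+    # A worked example (hypothetical paths): on Linux with
+    # java_home=/usr/lib/jvm/java-11, __init__ above ends up with
+    # compile_args == ['-I/usr/lib/jvm/java-11/include',
+    #                  '-I/usr/lib/jvm/java-11/include/linux'].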
+
+
+llvm_factory = DependencyFactory(
+ 'LLVM',
+ [DependencyMethods.CMAKE, DependencyMethods.CONFIG_TOOL],
+ cmake_class=LLVMDependencyCMake,
+ configtool_class=LLVMDependencyConfigTool,
+)
+
+gtest_factory = DependencyFactory(
+ 'gtest',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+ pkgconfig_class=GTestDependencyPC,
+ system_class=GTestDependencySystem,
+)
+
+gmock_factory = DependencyFactory(
+ 'gmock',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+ pkgconfig_class=GMockDependencyPC,
+ system_class=GMockDependencySystem,
+)
+
+zlib_factory = DependencyFactory(
+ 'zlib',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE, DependencyMethods.SYSTEM],
+ cmake_name='ZLIB',
+ system_class=ZlibSystemDependency,
+)
diff --git a/meson/mesonbuild/dependencies/dub.py b/meson/mesonbuild/dependencies/dub.py
new file mode 100644
index 000000000..8dfb4869d
--- /dev/null
+++ b/meson/mesonbuild/dependencies/dub.py
@@ -0,0 +1,240 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import ExternalDependency, DependencyException, DependencyMethods, DependencyTypeName
+from .pkgconfig import PkgConfigDependency
+from ..mesonlib import Popen_safe
+from ..programs import ExternalProgram
+from ..compilers import DCompiler
+from .. import mlog
+import re
+import os
+import copy
+import json
+import platform
+import typing as T
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+
+class DubDependency(ExternalDependency):
+ class_dubbin = None
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(DependencyTypeName('dub'), environment, kwargs, language='d')
+ self.name = name
+ self.module_path: T.Optional[str] = None
+
+ _temp_comp = super().get_compiler()
+ assert isinstance(_temp_comp, DCompiler)
+ self.compiler = _temp_comp
+
+ if 'required' in kwargs:
+ self.required = kwargs.get('required')
+
+ if DubDependency.class_dubbin is None:
+ self.dubbin = self._check_dub()
+ DubDependency.class_dubbin = self.dubbin
+ else:
+ self.dubbin = DubDependency.class_dubbin
+
+ if not self.dubbin:
+ if self.required:
+ raise DependencyException('DUB not found.')
+ self.is_found = False
+ return
+
+ assert isinstance(self.dubbin, ExternalProgram)
+ mlog.debug('Determining dependency {!r} with DUB executable '
+ '{!r}'.format(name, self.dubbin.get_path()))
+
+ # we need to know the target architecture
+ arch = self.compiler.arch
+
+ # Ask dub for the package
+ ret, res = self._call_dubbin(['describe', name, '--arch=' + arch])
+
+ if ret != 0:
+ self.is_found = False
+ return
+
+ comp = self.compiler.get_id().replace('llvm', 'ldc').replace('gcc', 'gdc')
+ packages = []
+ description = json.loads(res)
+ for package in description['packages']:
+ packages.append(package['name'])
+ if package['name'] == name:
+ self.is_found = True
+
+ not_lib = True
+ if 'targetType' in package:
+ if package['targetType'] in ['library', 'sourceLibrary', 'staticLibrary', 'dynamicLibrary']:
+ not_lib = False
+
+ if not_lib:
+ mlog.error(mlog.bold(name), "found but it isn't a library")
+ self.is_found = False
+ return
+
+ self.module_path = self._find_right_lib_path(package['path'], comp, description, True, package['targetFileName'])
+ if not os.path.exists(self.module_path):
+ # check if the dependency was built for other archs
+ archs = [['x86_64'], ['x86'], ['x86', 'x86_mscoff']]
+ for a in archs:
+ description_a = copy.deepcopy(description)
+ description_a['architecture'] = a
+ arch_module_path = self._find_right_lib_path(package['path'], comp, description_a, True, package['targetFileName'])
+ if arch_module_path:
+ mlog.error(mlog.bold(name), "found but it wasn't compiled for", mlog.bold(arch))
+ self.is_found = False
+ return
+
+ mlog.error(mlog.bold(name), "found but it wasn't compiled with", mlog.bold(comp))
+ self.is_found = False
+ return
+
+ self.version = package['version']
+ self.pkg = package
+
+ if self.pkg['targetFileName'].endswith('.a'):
+ self.static = True
+
+ self.compile_args = []
+ for flag in self.pkg['dflags']:
+            self.compile_args.append(flag)
+ for path in self.pkg['importPaths']:
+ self.compile_args.append('-I' + os.path.join(self.pkg['path'], path))
+
+ self.link_args = self.raw_link_args = []
+ for flag in self.pkg['lflags']:
+ self.link_args.append(flag)
+
+ self.link_args.append(os.path.join(self.module_path, self.pkg['targetFileName']))
+
+ # Handle dependencies
+ libs = []
+
+ def add_lib_args(field_name: str, target: T.Dict[str, T.Dict[str, str]]) -> None:
+ if field_name in target['buildSettings']:
+ for lib in target['buildSettings'][field_name]:
+ if lib not in libs:
+ libs.append(lib)
+ if os.name != 'nt':
+ pkgdep = PkgConfigDependency(lib, environment, {'required': 'true', 'silent': 'true'})
+ for arg in pkgdep.get_compile_args():
+ self.compile_args.append(arg)
+ for arg in pkgdep.get_link_args():
+ self.link_args.append(arg)
+ for arg in pkgdep.get_link_args(raw=True):
+ self.raw_link_args.append(arg)
+
+ for target in description['targets']:
+ if target['rootPackage'] in packages:
+ add_lib_args('libs', target)
+ add_lib_args(f'libs-{platform.machine()}', target)
+ for file in target['buildSettings']['linkerFiles']:
+ lib_path = self._find_right_lib_path(file, comp, description)
+ if lib_path:
+ self.link_args.append(lib_path)
+ else:
+ self.is_found = False
+
+ def _find_right_lib_path(self,
+ default_path: str,
+ comp: str,
+ description: T.Dict[str, str],
+ folder_only: bool = False,
+ file_name: str = '') -> T.Optional[str]:
+ module_path = lib_file_name = ''
+ if folder_only:
+ module_path = default_path
+ lib_file_name = file_name
+ else:
+ module_path = os.path.dirname(default_path)
+ lib_file_name = os.path.basename(default_path)
+ module_build_path = os.path.join(module_path, '.dub', 'build')
+
+        # If default_path points at the lib file itself and the lib's
+        # directory doesn't have a '.dub/build' subdir
+ if not os.path.isdir(module_build_path) and os.path.isfile(default_path):
+ if folder_only:
+ return module_path
+ else:
+ return default_path
+
+        # Get the D language version implemented by the compiler
+        # (gdc doesn't support this)
+        ret, res = self._call_compbin(['--version'])
+
+ if ret != 0:
+            mlog.error('Failed to run', mlog.bold(comp))
+ return None
+
+        d_ver_reg = re.search(r'v[0-9]\.[0-9][0-9][0-9]\.[0-9]', res) # Ex.: v2.081.2
+ if d_ver_reg is not None:
+ d_ver = d_ver_reg.group().rsplit('.', 1)[0].replace('v', '').replace('.', '') # Fix structure. Ex.: 2081
+ else:
+ d_ver = '' # gdc
+
+ if not os.path.isdir(module_build_path):
+ return ''
+
+ # Ex.: library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA
+ build_name = '-{}-{}-{}-{}_{}'.format(description['buildType'], '.'.join(description['platform']), '.'.join(description['architecture']), comp, d_ver)
+ for entry in os.listdir(module_build_path):
+ if build_name in entry:
+ for file in os.listdir(os.path.join(module_build_path, entry)):
+ if file == lib_file_name:
+ if folder_only:
+ return os.path.join(module_build_path, entry)
+ else:
+ return os.path.join(module_build_path, entry, lib_file_name)
+
+ return ''
+
+ def _call_dubbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str]:
+ assert isinstance(self.dubbin, ExternalProgram)
+ p, out = Popen_safe(self.dubbin.get_command() + args, env=env)[0:2]
+ return p.returncode, out.strip()
+
+    def _call_compbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str]:
+ p, out = Popen_safe(self.compiler.get_exelist() + args, env=env)[0:2]
+ return p.returncode, out.strip()
+
+ def _check_dub(self) -> T.Union[bool, ExternalProgram]:
+ dubbin: T.Union[bool, ExternalProgram] = ExternalProgram('dub', silent=True)
+ assert isinstance(dubbin, ExternalProgram)
+ if dubbin.found():
+ try:
+ p, out = Popen_safe(dubbin.get_command() + ['--version'])[0:2]
+ if p.returncode != 0:
+ mlog.warning('Found dub {!r} but couldn\'t run it'
+ ''.format(' '.join(dubbin.get_command())))
+ # Set to False instead of None to signify that we've already
+ # searched for it and not found it
+ dubbin = False
+ except (FileNotFoundError, PermissionError):
+ dubbin = False
+ else:
+ dubbin = False
+ if isinstance(dubbin, ExternalProgram):
+ mlog.log('Found DUB:', mlog.bold(dubbin.get_path()),
+ '(%s)' % out.strip())
+ else:
+ mlog.log('Found DUB:', mlog.red('NO'))
+ return dubbin
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.DUB]
diff --git a/meson/mesonbuild/dependencies/factory.py b/meson/mesonbuild/dependencies/factory.py
new file mode 100644
index 000000000..048e3bc58
--- /dev/null
+++ b/meson/mesonbuild/dependencies/factory.py
@@ -0,0 +1,151 @@
+# Copyright 2013-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import typing as T
+
+from ..mesonlib import MachineChoice
+from .base import DependencyException, DependencyMethods
+from .base import ExternalDependency
+from .base import process_method_kw
+from .base import BuiltinDependency, SystemDependency
+from .cmake import CMakeDependency
+from .framework import ExtraFrameworkDependency
+from .pkgconfig import PkgConfigDependency
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+ from .configtool import ConfigToolDependency
+
+ DependencyGenerator = T.Callable[[], ExternalDependency]
+ FactoryFunc = T.Callable[
+ [
+ 'Environment',
+ MachineChoice,
+ T.Dict[str, T.Any],
+ T.List[DependencyMethods]
+ ],
+ T.List[DependencyGenerator]
+ ]
+
+ WrappedFactoryFunc = T.Callable[
+ [
+ 'Environment',
+ MachineChoice,
+ T.Dict[str, T.Any]
+ ],
+ T.List[DependencyGenerator]
+ ]
+
+class DependencyFactory:
+
+ """Factory to get dependencies from multiple sources.
+
+ This class provides an initializer that takes a set of names and classes
+ for various kinds of dependencies. When the initialized object is called
+    it returns a list of callables that return Dependency objects to try in order.
+
+ :name: The name of the dependency. This will be passed as the name
+        parameter of each dependency unless it is overridden on a per-type
+        basis.
+ :methods: An ordered list of DependencyMethods. This is the order
+ dependencies will be returned in unless they are removed by the
+        _process_method function.
+    :*_name: This will overwrite the name passed to the corresponding class.
+ For example, if the name is 'zlib', but cmake calls the dependency
+ 'Z', then using `cmake_name='Z'` will pass the name as 'Z' to cmake.
+ :*_class: A *type* or callable that creates a class, and has the
+ signature of an ExternalDependency
+ :system_class: If you pass DependencyMethods.SYSTEM in methods, you must
+ set this argument.
+ """
+
+ def __init__(self, name: str, methods: T.List[DependencyMethods], *,
+ extra_kwargs: T.Optional[T.Dict[str, T.Any]] = None,
+ pkgconfig_name: T.Optional[str] = None,
+ pkgconfig_class: 'T.Type[PkgConfigDependency]' = PkgConfigDependency,
+ cmake_name: T.Optional[str] = None,
+ cmake_class: 'T.Type[CMakeDependency]' = CMakeDependency,
+ configtool_class: 'T.Optional[T.Type[ConfigToolDependency]]' = None,
+ framework_name: T.Optional[str] = None,
+ framework_class: 'T.Type[ExtraFrameworkDependency]' = ExtraFrameworkDependency,
+ builtin_class: 'T.Type[BuiltinDependency]' = BuiltinDependency,
+ system_class: 'T.Type[SystemDependency]' = SystemDependency):
+
+ if DependencyMethods.CONFIG_TOOL in methods and not configtool_class:
+ raise DependencyException('A configtool must have a custom class')
+
+ self.extra_kwargs = extra_kwargs or {}
+ self.methods = methods
+ self.classes: T.Dict[
+ DependencyMethods,
+ T.Callable[['Environment', T.Dict[str, T.Any]], ExternalDependency]
+ ] = {
+ # Just attach the correct name right now, either the generic name
+ # or the method specific name.
+ DependencyMethods.EXTRAFRAMEWORK: lambda env, kwargs: framework_class(framework_name or name, env, kwargs),
+ DependencyMethods.PKGCONFIG: lambda env, kwargs: pkgconfig_class(pkgconfig_name or name, env, kwargs),
+ DependencyMethods.CMAKE: lambda env, kwargs: cmake_class(cmake_name or name, env, kwargs),
+ DependencyMethods.SYSTEM: lambda env, kwargs: system_class(name, env, kwargs),
+ DependencyMethods.BUILTIN: lambda env, kwargs: builtin_class(name, env, kwargs),
+ DependencyMethods.CONFIG_TOOL: None,
+ }
+ if configtool_class is not None:
+ self.classes[DependencyMethods.CONFIG_TOOL] = lambda env, kwargs: configtool_class(name, env, kwargs)
+
+ @staticmethod
+ def _process_method(method: DependencyMethods, env: 'Environment', for_machine: MachineChoice) -> bool:
+ """Report whether a method is valid or not.
+
+ If the method is valid, return true, otherwise return false. This is
+ used in a list comprehension to filter methods that are not possible.
+
+        By default this only removes EXTRAFRAMEWORK dependencies on non-macOS platforms.
+ """
+ # Extra frameworks are only valid for macOS and other apple products
+ if (method is DependencyMethods.EXTRAFRAMEWORK and
+ not env.machines[for_machine].is_darwin()):
+ return False
+ return True
+
+ def __call__(self, env: 'Environment', for_machine: MachineChoice,
+ kwargs: T.Dict[str, T.Any]) -> T.List['DependencyGenerator']:
+ """Return a list of Dependencies with the arguments already attached."""
+ methods = process_method_kw(self.methods, kwargs)
+ nwargs = self.extra_kwargs.copy()
+ nwargs.update(kwargs)
+
+ return [functools.partial(self.classes[m], env, nwargs) for m in methods
+ if self._process_method(m, env, for_machine)]
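+    # A minimal consumption sketch (illustrative; the real call sites live in
+    # the interpreter): instantiate the returned candidates lazily, in order,
+    # until one is found:
+    #
+    #     for candidate in zlib_factory(env, for_machine, {}):
+    #         dep = candidate()
+    #         if dep.is_found:
+    #             break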
+
+
+def factory_methods(methods: T.Set[DependencyMethods]) -> T.Callable[['FactoryFunc'], 'WrappedFactoryFunc']:
+ """Decorator for handling methods for dependency factory functions.
+
+    This helps to make factory functions self-documenting.
+    >>> @factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE})
+    ... def factory(env: Environment, for_machine: MachineChoice, kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    ...     pass
+ """
+
+ def inner(func: 'FactoryFunc') -> 'WrappedFactoryFunc':
+
+ @functools.wraps(func)
+ def wrapped(env: 'Environment', for_machine: MachineChoice, kwargs: T.Dict[str, T.Any]) -> T.List['DependencyGenerator']:
+ return func(env, for_machine, kwargs, process_method_kw(methods, kwargs))
+
+ return wrapped
+
+ return inner
diff --git a/meson/mesonbuild/dependencies/framework.py b/meson/mesonbuild/dependencies/framework.py
new file mode 100644
index 000000000..48223987e
--- /dev/null
+++ b/meson/mesonbuild/dependencies/framework.py
@@ -0,0 +1,123 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import DependencyTypeName, ExternalDependency, DependencyException, DependencyMethods
+from ..mesonlib import MesonException, Version, stringlistify
+from .. import mlog
+from pathlib import Path
+import typing as T
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+
+class ExtraFrameworkDependency(ExternalDependency):
+ system_framework_paths: T.Optional[T.List[str]] = None
+
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+ paths = stringlistify(kwargs.get('paths', []))
+ super().__init__(DependencyTypeName('extraframeworks'), env, kwargs, language=language)
+ self.name = name
+ # Full path to framework directory
+ self.framework_path: T.Optional[str] = None
+ if not self.clib_compiler:
+ raise DependencyException('No C-like compilers are available')
+ if self.system_framework_paths is None:
+ try:
+ self.system_framework_paths = self.clib_compiler.find_framework_paths(self.env)
+ except MesonException as e:
+ if 'non-clang' in str(e):
+ # Apple frameworks can only be found (and used) with the
+ # system compiler. It is not available so bail immediately.
+ self.is_found = False
+ return
+ raise
+ self.detect(name, paths)
+
+ def detect(self, name: str, paths: T.List[str]) -> None:
+ if not paths:
+ paths = self.system_framework_paths
+ for p in paths:
+ mlog.debug(f'Looking for framework {name} in {p}')
+ # We need to know the exact framework path because it's used by the
+ # Qt5 dependency class, and for setting the include path. We also
+ # want to avoid searching in an invalid framework path which wastes
+ # time and can cause a false positive.
+ framework_path = self._get_framework_path(p, name)
+ if framework_path is None:
+ continue
+ # We want to prefer the specified paths (in order) over the system
+ # paths since these are "extra" frameworks.
+ # For example, Python2's framework is in /System/Library/Frameworks and
+ # Python3's framework is in /Library/Frameworks, but both are called
+ # Python.framework. We need to know for sure that the framework was
+ # found in the path we expect.
+ allow_system = p in self.system_framework_paths
+ args = self.clib_compiler.find_framework(name, self.env, [p], allow_system)
+ if args is None:
+ continue
+ self.link_args = args
+ self.framework_path = framework_path.as_posix()
+ self.compile_args = ['-F' + self.framework_path]
+ # We need to also add -I includes to the framework because all
+ # cross-platform projects such as OpenGL, Python, Qt, GStreamer,
+ # etc do not use "framework includes":
+ # https://developer.apple.com/library/archive/documentation/MacOSX/Conceptual/BPFrameworks/Tasks/IncludingFrameworks.html
+ incdir = self._get_framework_include_path(framework_path)
+ if incdir:
+ self.compile_args += ['-I' + incdir]
+ self.is_found = True
+ return
+
+ def _get_framework_path(self, path: str, name: str) -> T.Optional[Path]:
+ p = Path(path)
+ lname = name.lower()
+ for d in p.glob('*.framework/'):
+ if lname == d.name.rsplit('.', 1)[0].lower():
+ return d
+ return None
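+    # e.g. (illustrative) _get_framework_path('/Library/Frameworks', 'python')
+    # would return the Path for '/Library/Frameworks/Python.framework', since
+    # the name comparison is case-insensitive.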
+
+ def _get_framework_latest_version(self, path: Path) -> str:
+ versions = []
+ for each in path.glob('Versions/*'):
+ # macOS filesystems are usually case-insensitive
+ if each.name.lower() == 'current':
+ continue
+ versions.append(Version(each.name))
+ if len(versions) == 0:
+ # most system frameworks do not have a 'Versions' directory
+ return 'Headers'
+ return 'Versions/{}/Headers'.format(sorted(versions)[-1]._s)
+
+ def _get_framework_include_path(self, path: Path) -> T.Optional[str]:
+ # According to the spec, 'Headers' must always be a symlink to the
+ # Headers directory inside the currently-selected version of the
+ # framework, but sometimes frameworks are broken. Look in 'Versions'
+ # for the currently-selected version or pick the latest one.
+ trials = ('Headers', 'Versions/Current/Headers',
+ self._get_framework_latest_version(path))
+ for each in trials:
+ trial = path / each
+ if trial.is_dir():
+ return trial.as_posix()
+ return None
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.EXTRAFRAMEWORK]
+
+ def log_info(self) -> str:
+ return self.framework_path or ''
+
+ def log_tried(self) -> str:
+ return 'framework'
diff --git a/meson/mesonbuild/dependencies/hdf5.py b/meson/mesonbuild/dependencies/hdf5.py
new file mode 100644
index 000000000..c062e713e
--- /dev/null
+++ b/meson/mesonbuild/dependencies/hdf5.py
@@ -0,0 +1,180 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for the HDF5 external dependency.
+
+import functools
+import os
+import re
+import shutil
+import subprocess
+from pathlib import Path
+
+from ..mesonlib import OrderedSet, join_args
+from .base import DependencyException, DependencyMethods
+from .configtool import ConfigToolDependency
+from .pkgconfig import PkgConfigDependency
+from .factory import factory_methods
+import typing as T
+
+if T.TYPE_CHECKING:
+ from .base import Dependency
+ from .factory import DependencyGenerator
+ from ..environment import Environment
+ from ..mesonlib import MachineChoice
+
+
+class HDF5PkgConfigDependency(PkgConfigDependency):
+
+ """Handle brokenness in the HDF5 pkg-config files."""
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+ language = language or 'c'
+ if language not in {'c', 'cpp', 'fortran'}:
+ raise DependencyException(f'Language {language} is not supported with HDF5.')
+
+ super().__init__(name, environment, kwargs, language)
+ if not self.is_found:
+ return
+
+        # some broken pkg-config files don't actually list the full path to the needed includes
+ newinc = [] # type: T.List[str]
+ for arg in self.compile_args:
+ if arg.startswith('-I'):
+ stem = 'static' if kwargs.get('static', False) else 'shared'
+ if (Path(arg[2:]) / stem).is_dir():
+ newinc.append('-I' + str(Path(arg[2:]) / stem))
+ self.compile_args += newinc
+
+ link_args = [] # type: T.List[str]
+ for larg in self.get_link_args():
+ lpath = Path(larg)
+            # Some hdf5.pc files (e.g. Ubuntu's) don't include the commonly-used
+            # HL HDF5 libraries, so let's add them if they exist.
+            # Additionally, some pkg-config HDF5 HL files are malformed, so let's be sure to find HL anyway.
+ if lpath.is_file():
+ hl = []
+ if language == 'cpp':
+ hl += ['_hl_cpp', '_cpp']
+ elif language == 'fortran':
+ hl += ['_hl_fortran', 'hl_fortran', '_fortran']
+ hl += ['_hl'] # C HL library, always needed
+
+ suffix = '.' + lpath.name.split('.', 1)[1] # in case of .dll.a
+ for h in hl:
+ hlfn = lpath.parent / (lpath.name.split('.', 1)[0] + h + suffix)
+ if hlfn.is_file():
+ link_args.append(str(hlfn))
+ # HDF5 C libs are required by other HDF5 languages
+ link_args.append(larg)
+ else:
+ link_args.append(larg)
+
+ self.link_args = link_args
+
+
+class HDF5ConfigToolDependency(ConfigToolDependency):
+
+ """Wrapper around hdf5 binary config tools."""
+
+ version_arg = '-showconfig'
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+ language = language or 'c'
+ if language not in {'c', 'cpp', 'fortran'}:
+ raise DependencyException(f'Language {language} is not supported with HDF5.')
+
+ if language == 'c':
+ cenv = 'CC'
+ tools = ['h5cc']
+ elif language == 'cpp':
+ cenv = 'CXX'
+ tools = ['h5c++']
+ elif language == 'fortran':
+ cenv = 'FC'
+ tools = ['h5fc']
+ else:
+ raise DependencyException('How did you get here?')
+
+ # We need this before we call super()
+ for_machine = self.get_for_machine_from_kwargs(kwargs)
+
+ nkwargs = kwargs.copy()
+ nkwargs['tools'] = tools
+
+ # Override the compiler that the config tools are going to use by
+ # setting the environment variables that they use for the compiler and
+ # linkers.
+ compiler = environment.coredata.compilers[for_machine][language]
+ try:
+ os.environ[f'HDF5_{cenv}'] = join_args(compiler.get_exelist())
+ os.environ[f'HDF5_{cenv}LINKER'] = join_args(compiler.get_linker_exelist())
+ super().__init__(name, environment, nkwargs, language)
+ finally:
+ del os.environ[f'HDF5_{cenv}']
+ del os.environ[f'HDF5_{cenv}LINKER']
+ if not self.is_found:
+ return
+
+ # We first need to call the tool with -c to get the compile arguments
+ # and then without -c to get the link arguments.
+ args = self.get_config_value(['-show', '-c'], 'args')[1:]
+ args += self.get_config_value(['-show', '-noshlib' if kwargs.get('static', False) else '-shlib'], 'args')[1:]
+ for arg in args:
+ if arg.startswith(('-I', '-f', '-D')) or arg == '-pthread':
+ self.compile_args.append(arg)
+ elif arg.startswith(('-L', '-l', '-Wl')):
+ self.link_args.append(arg)
+ elif Path(arg).is_file():
+ self.link_args.append(arg)
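+        # e.g. (illustrative) '-show -c' might print a compile line such as
+        #   gcc -I/usr/include/hdf5/serial ... -c
+        # get_config_value() tokenizes it and the [1:] above drops the leading
+        # compiler name, keeping only the flags.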
+
+ # If the language is not C we need to add C as a subdependency
+ if language != 'c':
+ nkwargs = kwargs.copy()
+ nkwargs['language'] = 'c'
+ # I'm being too clever for mypy and pylint
+ self.is_found = self._add_sub_dependency(hdf5_factory(environment, for_machine, nkwargs)) # pylint: disable=no-value-for-parameter
+
+ def _sanitize_version(self, ver: str) -> str:
+ v = re.search(r'\s*HDF5 Version: (\d+\.\d+\.\d+)', ver)
+ return v.group(1)
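+    # e.g. (illustrative) a '-showconfig' dump containing the line
+    # 'HDF5 Version: 1.10.7' is sanitized to '1.10.7'.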
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL})
+def hdf5_factory(env: 'Environment', for_machine: 'MachineChoice',
+ kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+ language = kwargs.get('language')
+ candidates: T.List['DependencyGenerator'] = []
+
+ if DependencyMethods.PKGCONFIG in methods:
+ # Use an ordered set so that these remain the first tried pkg-config files
+ pkgconfig_files = OrderedSet(['hdf5', 'hdf5-serial'])
+        # FIXME: this won't honor pkg-config paths or cross/native files
+ PCEXE = shutil.which('pkg-config')
+ if PCEXE:
+            # some distros ship e.g. hdf5-1.2.3.pc, with the version number in the .pc filename.
+ ret = subprocess.run([PCEXE, '--list-all'], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL,
+ universal_newlines=True)
+ if ret.returncode == 0:
+ for pkg in ret.stdout.split('\n'):
+ if pkg.startswith('hdf5'):
+ pkgconfig_files.add(pkg.split(' ', 1)[0])
+
+ for pkg in pkgconfig_files:
+ candidates.append(functools.partial(HDF5PkgConfigDependency, pkg, env, kwargs, language))
+
+ if DependencyMethods.CONFIG_TOOL in methods:
+ candidates.append(functools.partial(HDF5ConfigToolDependency, 'hdf5', env, kwargs, language))
+
+ return candidates
diff --git a/meson/mesonbuild/dependencies/misc.py b/meson/mesonbuild/dependencies/misc.py
new file mode 100644
index 000000000..483212f96
--- /dev/null
+++ b/meson/mesonbuild/dependencies/misc.py
@@ -0,0 +1,623 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for miscellaneous external dependencies.
+
+from pathlib import Path
+import functools
+import re
+import sysconfig
+import typing as T
+
+from .. import mesonlib
+from .. import mlog
+from ..environment import detect_cpu_family
+from .base import DependencyException, DependencyMethods
+from .base import BuiltinDependency, SystemDependency
+from .cmake import CMakeDependency
+from .configtool import ConfigToolDependency
+from .factory import DependencyFactory, factory_methods
+from .pkgconfig import PkgConfigDependency
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment, MachineChoice
+ from .factory import DependencyGenerator
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE})
+def netcdf_factory(env: 'Environment',
+ for_machine: 'MachineChoice',
+ kwargs: T.Dict[str, T.Any],
+ methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+ language = kwargs.get('language', 'c')
+ if language not in ('c', 'cpp', 'fortran'):
+ raise DependencyException(f'Language {language} is not supported with NetCDF.')
+
+ candidates: T.List['DependencyGenerator'] = []
+
+ if DependencyMethods.PKGCONFIG in methods:
+ if language == 'fortran':
+ pkg = 'netcdf-fortran'
+ else:
+ pkg = 'netcdf'
+
+ candidates.append(functools.partial(PkgConfigDependency, pkg, env, kwargs, language=language))
+
+ if DependencyMethods.CMAKE in methods:
+ candidates.append(functools.partial(CMakeDependency, 'NetCDF', env, kwargs, language=language))
+
+ return candidates
+
+
+class OpenMPDependency(SystemDependency):
+ # Map date of specification release (which is the macro value) to a version.
+ VERSIONS = {
+ '201811': '5.0',
+ '201611': '5.0-revision1', # This is supported by ICC 19.x
+ '201511': '4.5',
+ '201307': '4.0',
+ '201107': '3.1',
+ '200805': '3.0',
+ '200505': '2.5',
+ '200203': '2.0',
+ '199810': '1.0',
+ }
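+    # e.g. a compiler that defines _OPENMP=201511 is reported as OpenMP 4.5.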
+
+ def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+ language = kwargs.get('language')
+ super().__init__('openmp', environment, kwargs, language=language)
+ self.is_found = False
+ if self.clib_compiler.get_id() == 'pgi':
+ # through at least PGI 19.4, there is no macro defined for OpenMP, but OpenMP 3.1 is supported.
+ self.version = '3.1'
+ self.is_found = True
+ self.compile_args = self.link_args = self.clib_compiler.openmp_flags()
+ return
+ try:
+ openmp_date = self.clib_compiler.get_define(
+ '_OPENMP', '', self.env, self.clib_compiler.openmp_flags(), [self], disable_cache=True)[0]
+ except mesonlib.EnvironmentException as e:
+ mlog.debug('OpenMP support not available in the compiler')
+ mlog.debug(e)
+ openmp_date = None
+
+ if openmp_date:
+ try:
+ self.version = self.VERSIONS[openmp_date]
+ except KeyError:
+ mlog.debug(f'Could not find an OpenMP version matching {openmp_date}')
+ if openmp_date == '_OPENMP':
+ mlog.debug('This can be caused by flags such as gcc\'s `-fdirectives-only`, which affect preprocessor behavior.')
+ return
+ # Flang has omp_lib.h
+ header_names = ('omp.h', 'omp_lib.h')
+ for name in header_names:
+ if self.clib_compiler.has_header(name, '', self.env, dependencies=[self], disable_cache=True)[0]:
+ self.is_found = True
+ self.compile_args = self.clib_compiler.openmp_flags()
+ self.link_args = self.clib_compiler.openmp_link_flags()
+ break
+ if not self.is_found:
+ mlog.log(mlog.yellow('WARNING:'), 'OpenMP found but omp.h missing.')
+
+
+class ThreadDependency(SystemDependency):
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+ super().__init__(name, environment, kwargs)
+ self.is_found = True
+        # Happens when using a language that has a threads
+        # concept but no C compiler, such as plain CUDA.
+ if self.clib_compiler is None:
+ self.compile_args = []
+ self.link_args = []
+ else:
+ self.compile_args = self.clib_compiler.thread_flags(environment)
+ self.link_args = self.clib_compiler.thread_link_flags(environment)
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.AUTO, DependencyMethods.CMAKE]
+
+
+class BlocksDependency(SystemDependency):
+ def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+ super().__init__('blocks', environment, kwargs)
+ self.name = 'blocks'
+ self.is_found = False
+
+ if self.env.machines[self.for_machine].is_darwin():
+ self.compile_args = []
+ self.link_args = []
+ else:
+ self.compile_args = ['-fblocks']
+ self.link_args = ['-lBlocksRuntime']
+
+        if not self.clib_compiler.has_header('Block.h', '', environment, disable_cache=True)[0] or \
+ not self.clib_compiler.find_library('BlocksRuntime', environment, []):
+ mlog.log(mlog.red('ERROR:'), 'BlocksRuntime not found.')
+ return
+
+ source = '''
+ int main(int argc, char **argv)
+ {
+ int (^callback)(void) = ^ int (void) { return 0; };
+ return callback();
+ }'''
+
+ with self.clib_compiler.compile(source, extra_args=self.compile_args + self.link_args) as p:
+ if p.returncode != 0:
+ mlog.log(mlog.red('ERROR:'), 'Compiler does not support blocks extension.')
+ return
+
+ self.is_found = True
+
+
+class Python3DependencySystem(SystemDependency):
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+ super().__init__(name, environment, kwargs)
+
+ if not environment.machines.matches_build_machine(self.for_machine):
+ return
+ if not environment.machines[self.for_machine].is_windows():
+ return
+
+ self.name = 'python3'
+ self.static = kwargs.get('static', False)
+ # We can only be sure that it is Python 3 at this point
+ self.version = '3'
+ self._find_libpy3_windows(environment)
+
+ @staticmethod
+ def get_windows_python_arch() -> T.Optional[str]:
+ pyplat = sysconfig.get_platform()
+ if pyplat == 'mingw':
+ pycc = sysconfig.get_config_var('CC')
+ if pycc.startswith('x86_64'):
+ return '64'
+ elif pycc.startswith(('i686', 'i386')):
+ return '32'
+ else:
+ mlog.log(f'MinGW Python built with unknown CC {pycc!r}, please file a bug')
+ return None
+ elif pyplat == 'win32':
+ return '32'
+ elif pyplat in ('win64', 'win-amd64'):
+ return '64'
+ mlog.log(f'Unknown Windows Python platform {pyplat!r}')
+ return None
+
+ def get_windows_link_args(self) -> T.Optional[T.List[str]]:
+ pyplat = sysconfig.get_platform()
+ if pyplat.startswith('win'):
+ vernum = sysconfig.get_config_var('py_version_nodot')
+ if self.static:
+ libpath = Path('libs') / f'libpython{vernum}.a'
+ else:
+ comp = self.get_compiler()
+ if comp.id == "gcc":
+ libpath = Path(f'python{vernum}.dll')
+ else:
+ libpath = Path('libs') / f'python{vernum}.lib'
+ lib = Path(sysconfig.get_config_var('base')) / libpath
+ elif pyplat == 'mingw':
+ if self.static:
+ libname = sysconfig.get_config_var('LIBRARY')
+ else:
+ libname = sysconfig.get_config_var('LDLIBRARY')
+ lib = Path(sysconfig.get_config_var('LIBDIR')) / libname
+ if not lib.exists():
+ mlog.log('Could not find Python3 library {!r}'.format(str(lib)))
+ return None
+ return [str(lib)]
+
+ def _find_libpy3_windows(self, env: 'Environment') -> None:
+ '''
+ Find python3 libraries on Windows and also verify that the arch matches
+ what we are building for.
+ '''
+ pyarch = self.get_windows_python_arch()
+ if pyarch is None:
+ self.is_found = False
+ return
+ arch = detect_cpu_family(env.coredata.compilers.host)
+ if arch == 'x86':
+ arch = '32'
+ elif arch == 'x86_64':
+ arch = '64'
+ else:
+ # We can't cross-compile Python 3 dependencies on Windows yet
+ mlog.log(f'Unknown architecture {arch!r} for',
+ mlog.bold(self.name))
+ self.is_found = False
+ return
+ # Pyarch ends in '32' or '64'
+ if arch != pyarch:
+ mlog.log('Need', mlog.bold(self.name), 'for {}-bit, but '
+ 'found {}-bit'.format(arch, pyarch))
+ self.is_found = False
+ return
+ # This can fail if the library is not found
+ largs = self.get_windows_link_args()
+ if largs is None:
+ self.is_found = False
+ return
+ self.link_args = largs
+ # Compile args
+ inc = sysconfig.get_path('include')
+ platinc = sysconfig.get_path('platinclude')
+ self.compile_args = ['-I' + inc]
+ if inc != platinc:
+ self.compile_args.append('-I' + platinc)
+ self.version = sysconfig.get_config_var('py_version')
+ self.is_found = True
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ if mesonlib.is_windows():
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG]
+ elif mesonlib.is_osx():
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.EXTRAFRAMEWORK]
+ else:
+ return [DependencyMethods.PKGCONFIG]
+
+ def log_tried(self) -> str:
+ return 'sysconfig'
+
+
+class PcapDependencyConfigTool(ConfigToolDependency):
+
+ tools = ['pcap-config']
+ tool_name = 'pcap-config'
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
+ self.version = self.get_pcap_lib_version()
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
+
+ def get_pcap_lib_version(self) -> T.Optional[str]:
+ # Since we seem to need to run a program to discover the pcap version,
+ # we can't do that when cross-compiling
+ # FIXME: this should be handled if we have an exe_wrapper
+ if not self.env.machines.matches_build_machine(self.for_machine):
+ return None
+
+ v = self.clib_compiler.get_return_value('pcap_lib_version', 'string',
+ '#include <pcap.h>', self.env, [], [self])
+ v = re.sub(r'libpcap version ', '', str(v))
+ v = re.sub(r' -- Apple version.*$', '', v)
+ return v
+
+
+class CupsDependencyConfigTool(ConfigToolDependency):
+
+ tools = ['cups-config']
+ tool_name = 'cups-config'
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--ldflags', '--libs'], 'link_args')
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ if mesonlib.is_osx():
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK, DependencyMethods.CMAKE]
+ else:
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.CMAKE]
+
+
+class LibWmfDependencyConfigTool(ConfigToolDependency):
+
+ tools = ['libwmf-config']
+ tool_name = 'libwmf-config'
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
+
+
+class LibGCryptDependencyConfigTool(ConfigToolDependency):
+
+ tools = ['libgcrypt-config']
+ tool_name = 'libgcrypt-config'
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
+ self.version = self.get_config_value(['--version'], 'version')[0]
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
+
+
+class GpgmeDependencyConfigTool(ConfigToolDependency):
+
+ tools = ['gpgme-config']
+    tool_name = 'gpgme-config'
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
+ self.version = self.get_config_value(['--version'], 'version')[0]
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
+
+
+class ShadercDependency(SystemDependency):
+
+ def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__('shaderc', environment, kwargs)
+
+ static_lib = 'shaderc_combined'
+ shared_lib = 'shaderc_shared'
+
+ libs = [shared_lib, static_lib]
+ if self.static:
+ libs.reverse()
+
+ cc = self.get_compiler()
+
+ for lib in libs:
+ self.link_args = cc.find_library(lib, environment, [])
+ if self.link_args is not None:
+ self.is_found = True
+
+ if self.static and lib != static_lib:
+ mlog.warning(f'Static library {static_lib!r} not found for dependency '
+ f'{self.name!r}, may not be statically linked')
+
+ break
+
+ def log_tried(self) -> str:
+ return 'system'
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.SYSTEM, DependencyMethods.PKGCONFIG]
+
+
+class CursesConfigToolDependency(ConfigToolDependency):
+
+ """Use the curses config tools."""
+
+ tool = 'curses-config'
+ # ncurses5.4-config is for macOS Catalina
+ tools = ['ncursesw6-config', 'ncursesw5-config', 'ncurses6-config', 'ncurses5-config', 'ncurses5.4-config']
+
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
+ super().__init__(name, env, kwargs, language)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
+
+
+class CursesSystemDependency(SystemDependency):
+
+ """Curses dependency the hard way.
+
+    This replaces hand-rolled find_library() and has_header() calls. We
+    provide this for portability reasons; there are a large number of curses
+ implementations, and the differences between them can be very annoying.
+ """
+
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, env, kwargs)
+
+ candidates = [
+ ('pdcurses', ['pdcurses/curses.h']),
+ ('ncursesw', ['ncursesw/ncurses.h', 'ncurses.h']),
+ ('ncurses', ['ncurses/ncurses.h', 'ncurses/curses.h', 'ncurses.h']),
+ ('curses', ['curses.h']),
+ ]
+
+        # Not sure how else to elegantly break out of both loops
+ for lib, headers in candidates:
+ l = self.clib_compiler.find_library(lib, env, [])
+ if l:
+ for header in headers:
+ h = self.clib_compiler.has_header(header, '', env)
+ if h[0]:
+ self.is_found = True
+ self.link_args = l
+ # Not sure how to find version for non-ncurses curses
+ # implementations. The one in illumos/OpenIndiana
+ # doesn't seem to have a version defined in the header.
+ if lib.startswith('ncurses'):
+ v, _ = self.clib_compiler.get_define('NCURSES_VERSION', f'#include <{header}>', env, [], [self])
+ self.version = v.strip('"')
+ if lib.startswith('pdcurses'):
+ v_major, _ = self.clib_compiler.get_define('PDC_VER_MAJOR', f'#include <{header}>', env, [], [self])
+ v_minor, _ = self.clib_compiler.get_define('PDC_VER_MINOR', f'#include <{header}>', env, [], [self])
+ self.version = f'{v_major}.{v_minor}'
+
+ # Check the version if possible, emit a warning if we can't
+ req = kwargs.get('version')
+ if req:
+ if self.version:
+ self.is_found = mesonlib.version_compare(self.version, req)
+ else:
+ mlog.warning('Cannot determine version of curses to compare against.')
+
+ if self.is_found:
+ mlog.debug('Curses library:', l)
+ mlog.debug('Curses header:', header)
+ break
+ if self.is_found:
+ break
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.SYSTEM]
+
+
+class IntlBuiltinDependency(BuiltinDependency):
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, env, kwargs)
+
+ if self.clib_compiler.has_function('ngettext', '', env)[0]:
+ self.is_found = True
+
+
+class IntlSystemDependency(SystemDependency):
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, env, kwargs)
+
+ h = self.clib_compiler.has_header('libintl.h', '', env)
+ self.link_args = self.clib_compiler.find_library('intl', env, [])
+
+ if h and self.link_args:
+ self.is_found = True
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.SYSTEM})
+def curses_factory(env: 'Environment',
+ for_machine: 'MachineChoice',
+ kwargs: T.Dict[str, T.Any],
+ methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+ candidates: T.List['DependencyGenerator'] = []
+
+ if DependencyMethods.PKGCONFIG in methods:
+ pkgconfig_files = ['pdcurses', 'ncursesw', 'ncurses', 'curses']
+ for pkg in pkgconfig_files:
+ candidates.append(functools.partial(PkgConfigDependency, pkg, env, kwargs))
+
+ # There are path handling problems with these methods on MSYS, and they
+ # don't apply to Windows otherwise (Cygwin is handled separately from
+ # Windows)
+ if not env.machines[for_machine].is_windows():
+ if DependencyMethods.CONFIG_TOOL in methods:
+ candidates.append(functools.partial(CursesConfigToolDependency, 'curses', env, kwargs))
+
+ if DependencyMethods.SYSTEM in methods:
+ candidates.append(functools.partial(CursesSystemDependency, 'curses', env, kwargs))
+
+ return candidates
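A note on the pattern used here: each candidate is a functools.partial, so no dependency object is constructed (and no subprocess runs) until the lookup machinery actually calls it. A minimal, self-contained sketch of that deferred-construction idea (the names are illustrative, not Meson API):

    import functools

    def make_candidates(names):
        # Nothing is evaluated yet; each entry is a zero-argument callable.
        return [functools.partial(print, 'trying', n) for n in names]

    for candidate in make_candidates(['ncursesw', 'ncurses', 'curses']):
        candidate()  # a real lookup would stop at the first found dependency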
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM})
+def shaderc_factory(env: 'Environment',
+ for_machine: 'MachineChoice',
+ kwargs: T.Dict[str, T.Any],
+ methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+ """Custom DependencyFactory for ShaderC.
+
+ ShaderC's oddity of providing three different libraries from the same
+ build is easier to represent as a separate function than by twisting
+ DependencyFactory even more.
+ """
+ candidates: T.List['DependencyGenerator'] = []
+
+ if DependencyMethods.PKGCONFIG in methods:
+ # ShaderC packages its shared and static libs together
+ # and provides different pkg-config files for each one. We
+ # smooth over this difference by handling the static
+ # keyword before handing off to the pkg-config handler.
+ shared_libs = ['shaderc']
+ static_libs = ['shaderc_combined', 'shaderc_static']
+
+ if kwargs.get('static', False):
+ c = [functools.partial(PkgConfigDependency, name, env, kwargs)
+ for name in static_libs + shared_libs]
+ else:
+ c = [functools.partial(PkgConfigDependency, name, env, kwargs)
+ for name in shared_libs + static_libs]
+ candidates.extend(c)
+
+ if DependencyMethods.SYSTEM in methods:
+ candidates.append(functools.partial(ShadercDependency, env, kwargs))
+
+ return candidates
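A hedged sketch of the ordering logic above: the 'static' keyword only flips which pkg-config names are tried first, it never removes any. The helper below is hypothetical, written just to make that visible:

    def shaderc_pkg_order(static: bool) -> list:
        shared = ['shaderc']
        static_names = ['shaderc_combined', 'shaderc_static']
        # Both sets are always tried; 'static' only changes the priority.
        return static_names + shared if static else shared + static_names

    assert shaderc_pkg_order(True) == ['shaderc_combined', 'shaderc_static', 'shaderc']
    assert shaderc_pkg_order(False) == ['shaderc', 'shaderc_combined', 'shaderc_static']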
+
+
+cups_factory = DependencyFactory(
+ 'cups',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK, DependencyMethods.CMAKE],
+ configtool_class=CupsDependencyConfigTool,
+ cmake_name='Cups',
+)
+
+gpgme_factory = DependencyFactory(
+ 'gpgme',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+ configtool_class=GpgmeDependencyConfigTool,
+)
+
+libgcrypt_factory = DependencyFactory(
+ 'libgcrypt',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+ configtool_class=LibGCryptDependencyConfigTool,
+)
+
+libwmf_factory = DependencyFactory(
+ 'libwmf',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+ configtool_class=LibWmfDependencyConfigTool,
+)
+
+pcap_factory = DependencyFactory(
+ 'pcap',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+ configtool_class=PcapDependencyConfigTool,
+ pkgconfig_name='libpcap',
+)
+
+python3_factory = DependencyFactory(
+ 'python3',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM, DependencyMethods.EXTRAFRAMEWORK],
+ system_class=Python3DependencySystem,
+ # There is no version number in the macOS framework name
+ framework_name='Python',
+ # There is a python in /System/Library/Frameworks, but that's Python 2.x,
+ # Python 3 will always be in /Library
+ extra_kwargs={'paths': ['/Library/Frameworks']},
+)
+
+threads_factory = DependencyFactory(
+ 'threads',
+ [DependencyMethods.SYSTEM, DependencyMethods.CMAKE],
+ cmake_name='Threads',
+ system_class=ThreadDependency,
+)
+
+intl_factory = DependencyFactory(
+ 'intl',
+ [DependencyMethods.BUILTIN, DependencyMethods.SYSTEM],
+ builtin_class=IntlBuiltinDependency,
+ system_class=IntlSystemDependency,
+)
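These module-level factory objects are what the dependency lookup machinery iterates over. Roughly, and this is a simplified sketch with stand-in names rather than Meson's actual interface, the consumption side looks like:

    class FakeDep:
        def __init__(self, ok: bool) -> None:
            self.ok = ok
        def found(self) -> bool:
            return self.ok

    def fake_factory(env, for_machine, kwargs):
        # Stand-in for e.g. intl_factory: a prioritized list of constructors.
        return [lambda: FakeDep(False), lambda: FakeDep(True)]

    def lookup(factory, env, for_machine, kwargs):
        # Call each zero-argument candidate in order; the first hit wins.
        for candidate in factory(env, for_machine, kwargs):
            dep = candidate()
            if dep.found():
                return dep
        return None

    assert lookup(fake_factory, None, None, {}).found()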
diff --git a/meson/mesonbuild/dependencies/mpi.py b/meson/mesonbuild/dependencies/mpi.py
new file mode 100644
index 000000000..2354767c2
--- /dev/null
+++ b/meson/mesonbuild/dependencies/mpi.py
@@ -0,0 +1,236 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import typing as T
+import os
+import re
+
+from ..environment import detect_cpu_family
+from .base import DependencyMethods, detect_compiler, SystemDependency
+from .configtool import ConfigToolDependency
+from .factory import factory_methods
+from .pkgconfig import PkgConfigDependency
+
+if T.TYPE_CHECKING:
+ from .factory import DependencyGenerator
+ from ..environment import Environment, MachineChoice
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.SYSTEM})
+def mpi_factory(env: 'Environment',
+ for_machine: 'MachineChoice',
+ kwargs: T.Dict[str, T.Any],
+ methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+ language = kwargs.get('language', 'c')
+ if language not in {'c', 'cpp', 'fortran'}:
+ # MPI doesn't work with any other languages
+ return []
+
+ candidates: T.List['DependencyGenerator'] = []
+ compiler = detect_compiler('mpi', env, for_machine, language)
+ if compiler is None:
+ return []
+ compiler_is_intel = compiler.get_id() in {'intel', 'intel-cl'}
+
+ # Only OpenMPI has pkg-config, and it doesn't work with the intel compilers
+ if DependencyMethods.PKGCONFIG in methods and not compiler_is_intel:
+ pkg_name = None
+ if language == 'c':
+ pkg_name = 'ompi-c'
+ elif language == 'cpp':
+ pkg_name = 'ompi-cxx'
+ elif language == 'fortran':
+ pkg_name = 'ompi-fort'
+ candidates.append(functools.partial(
+ PkgConfigDependency, pkg_name, env, kwargs, language=language))
+
+ if DependencyMethods.CONFIG_TOOL in methods:
+ nwargs = kwargs.copy()
+
+ if compiler_is_intel:
+ if env.machines[for_machine].is_windows():
+ nwargs['version_arg'] = '-v'
+ nwargs['returncode_value'] = 3
+
+ if language == 'c':
+ tool_names = [os.environ.get('I_MPI_CC'), 'mpiicc']
+ elif language == 'cpp':
+ tool_names = [os.environ.get('I_MPI_CXX'), 'mpiicpc']
+ elif language == 'fortran':
+ tool_names = [os.environ.get('I_MPI_F90'), 'mpiifort']
+
+ cls = IntelMPIConfigToolDependency # type: T.Type[ConfigToolDependency]
+ else: # OpenMPI, which doesn't work with intel
+ #
+ # We try the environment variables for the tools first, but then
+ # fall back to the hardcoded names
+ if language == 'c':
+ tool_names = [os.environ.get('MPICC'), 'mpicc']
+ elif language == 'cpp':
+ tool_names = [os.environ.get('MPICXX'), 'mpic++', 'mpicxx', 'mpiCC']
+ elif language == 'fortran':
+ tool_names = [os.environ.get(e) for e in ['MPIFC', 'MPIF90', 'MPIF77']]
+ tool_names.extend(['mpifort', 'mpif90', 'mpif77'])
+
+ cls = OpenMPIConfigToolDependency
+
+ tool_names = [t for t in tool_names if t] # remove empty environment variables
+ assert tool_names
+
+ nwargs['tools'] = tool_names
+ candidates.append(functools.partial(
+ cls, tool_names[0], env, nwargs, language=language))
+
+ if DependencyMethods.SYSTEM in methods:
+ candidates.append(functools.partial(
+ MSMPIDependency, 'msmpi', env, kwargs, language=language))
+
+ return candidates
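The environment-variable handling above reduces to "user override first, well-known wrapper names second". A small runnable illustration (MPICC is a real OpenMPI convention; the helper name is ours):

    import os

    def mpi_cc_candidates() -> list:
        # $MPICC, if set, is tried before the stock wrapper name.
        names = [os.environ.get('MPICC'), 'mpicc']
        return [n for n in names if n]  # drop unset entries

    os.environ['MPICC'] = '/opt/mpi/bin/mpicc'
    assert mpi_cc_candidates() == ['/opt/mpi/bin/mpicc', 'mpicc']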
+
+
+class _MPIConfigToolDependency(ConfigToolDependency):
+
+ def _filter_compile_args(self, args: T.Sequence[str]) -> T.List[str]:
+ """
+ MPI wrappers return a bunch of garbage args.
+ Drop -O2 and everything that is not needed.
+ """
+ result = []
+ multi_args: T.Tuple[str, ...] = ('-I', )
+ if self.language == 'fortran':
+ fc = self.env.coredata.compilers[self.for_machine]['fortran']
+ multi_args += fc.get_module_incdir_args()
+
+ include_next = False
+ for f in args:
+ if f.startswith(('-D', '-f') + multi_args) or f == '-pthread' \
+ or (f.startswith('-W') and f != '-Wall' and not f.startswith('-Werror')):
+ result.append(f)
+ if f in multi_args:
+ # Path is a separate argument.
+ include_next = True
+ elif include_next:
+ include_next = False
+ result.append(f)
+ return result
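To make the keep-list above concrete, here is what the compile-arg filter does to a typical wrapper output for a C-language MPI (so multi_args is just ('-I',)); the expected value follows directly from the conditions in _filter_compile_args:

    wrapper_output = ['-O2', '-I', '/usr/include/mpi', '-DOMPI_SKIP_MPICXX', '-pthread', '-Wall']
    # -O2 and -Wall are dropped; -I plus its separate path argument,
    # -D defines and -pthread survive.
    expected = ['-I', '/usr/include/mpi', '-DOMPI_SKIP_MPICXX', '-pthread']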
+
+ def _filter_link_args(self, args: T.Sequence[str]) -> T.List[str]:
+ """
+ MPI wrappers return a bunch of garbage args.
+ Drop -O2 and everything that is not needed.
+ """
+ result = []
+ include_next = False
+ for f in args:
+ if self._is_link_arg(f):
+ result.append(f)
+ if f in ('-L', '-Xlinker'):
+ include_next = True
+ elif include_next:
+ include_next = False
+ result.append(f)
+ return result
+
+ def _is_link_arg(self, f: str) -> bool:
+ if self.clib_compiler.id == 'intel-cl':
+ return f == '/link' or f.startswith('/LIBPATH') or f.endswith('.lib') # always .lib whether static or dynamic
+ else:
+ return (f.startswith(('-L', '-l', '-Xlinker')) or
+ f == '-pthread' or
+ (f.startswith('-W') and f != '-Wall' and not f.startswith('-Werror')))
+
+
+class IntelMPIConfigToolDependency(_MPIConfigToolDependency):
+
+ """Wrapper around Intel's mpiicc and friends."""
+
+ version_arg = '-v' # --version is not the same as -v
+
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+ language: T.Optional[str] = None):
+ super().__init__(name, env, kwargs, language=language)
+ if not self.is_found:
+ return
+
+ args = self.get_config_value(['-show'], 'link and compile args')
+ self.compile_args = self._filter_compile_args(args)
+ self.link_args = self._filter_link_args(args)
+
+ def _sanitize_version(self, out: str) -> str:
+ v = re.search(r'(\d{4}) Update (\d)', out)
+ if v:
+ return '{}.{}'.format(v.group(1), v.group(2))
+ return out
+
+
+class OpenMPIConfigToolDependency(_MPIConfigToolDependency):
+
+ """Wrapper around OpenMPI mpicc and friends."""
+
+ version_arg = '--showme:version'
+
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+ language: T.Optional[str] = None):
+ super().__init__(name, env, kwargs, language=language)
+ if not self.is_found:
+ return
+
+ c_args = self.get_config_value(['--showme:compile'], 'compile_args')
+ self.compile_args = self._filter_compile_args(c_args)
+
+ l_args = self.get_config_value(['--showme:link'], 'link_args')
+ self.link_args = self._filter_link_args(l_args)
+
+ def _sanitize_version(self, out: str) -> str:
+ v = re.search(r'\d+\.\d+\.\d+', out)
+ if v:
+ return v.group(0)
+ return out
+
+
+class MSMPIDependency(SystemDependency):
+
+ """The Microsoft MPI."""
+
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+ language: T.Optional[str] = None):
+ super().__init__(name, env, kwargs, language=language)
+ # MSMPI only supports the C and Fortran APIs
+ if language not in {'c', 'fortran', None}:
+ self.is_found = False
+ return
+ # MSMPI is only for Windows, obviously
+ if not self.env.machines[self.for_machine].is_windows():
+ return
+
+ incdir = os.environ.get('MSMPI_INC')
+ arch = detect_cpu_family(self.env.coredata.compilers.host)
+ libdir = None
+ if arch == 'x86':
+ libdir = os.environ.get('MSMPI_LIB32')
+ post = 'x86'
+ elif arch == 'x86_64':
+ libdir = os.environ.get('MSMPI_LIB64')
+ post = 'x64'
+
+ if libdir is None or incdir is None:
+ self.is_found = False
+ return
+
+ self.is_found = True
+ self.link_args = ['-l' + os.path.join(libdir, 'msmpi')]
+ self.compile_args = ['-I' + incdir, '-I' + os.path.join(incdir, post)]
+ if self.language == 'fortran':
+ self.link_args.append('-l' + os.path.join(libdir, 'msmpifec'))
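A hedged example of what the MS-MPI branch produces: with MSMPI_INC=C:\mpi\Include and MSMPI_LIB64=C:\mpi\Lib\x64 on an x86_64 host, the detection above yields approximately:

    compile_args = ['-IC:\\mpi\\Include', '-IC:\\mpi\\Include\\x64']
    link_args = ['-lC:\\mpi\\Lib\\x64\\msmpi']  # plus ...\\msmpifec for Fortran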
diff --git a/meson/mesonbuild/dependencies/pkgconfig.py b/meson/mesonbuild/dependencies/pkgconfig.py
new file mode 100644
index 000000000..f09750467
--- /dev/null
+++ b/meson/mesonbuild/dependencies/pkgconfig.py
@@ -0,0 +1,503 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import ExternalDependency, DependencyException, DependencyMethods, sort_libpaths, DependencyTypeName
+from ..mesonlib import LibType, MachineChoice, OptionKey, OrderedSet, PerMachine, Popen_safe
+from ..programs import find_external_program, ExternalProgram
+from .. import mlog
+from pathlib import PurePath
+import re
+import os
+import shlex
+import typing as T
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+
+class PkgConfigDependency(ExternalDependency):
+ # The class's copy of the pkg-config path. Avoids having to search for it
+ # multiple times in the same Meson invocation.
+ class_pkgbin: PerMachine[T.Union[None, bool, ExternalProgram]] = PerMachine(None, None)
+ # We cache all pkg-config subprocess invocations to avoid redundant calls
+ pkgbin_cache: T.Dict[
+ T.Tuple[ExternalProgram, T.Tuple[str, ...], T.FrozenSet[T.Tuple[str, str]]],
+ T.Tuple[int, str, str]
+ ] = {}
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+ super().__init__(DependencyTypeName('pkgconfig'), environment, kwargs, language=language)
+ self.name = name
+ self.is_libtool = False
+ # Store a copy of the pkg-config path on the object itself so it is
+ # stored in the pickled coredata and recovered.
+ self.pkgbin: T.Union[None, bool, ExternalProgram] = None
+
+ # Only search for pkg-config for each machine the first time and store
+ # the result in the class definition
+ if PkgConfigDependency.class_pkgbin[self.for_machine] is False:
+ mlog.debug('Pkg-config binary for %s is cached as not found.' % self.for_machine)
+ elif PkgConfigDependency.class_pkgbin[self.for_machine] is not None:
+ mlog.debug('Pkg-config binary for %s is cached.' % self.for_machine)
+ else:
+ assert PkgConfigDependency.class_pkgbin[self.for_machine] is None
+ mlog.debug('Pkg-config binary for %s is not cached.' % self.for_machine)
+ for potential_pkgbin in find_external_program(
+ self.env, self.for_machine, 'pkgconfig', 'Pkg-config',
+ environment.default_pkgconfig, allow_default_for_cross=False):
+ version_if_ok = self.check_pkgconfig(potential_pkgbin)
+ if not version_if_ok:
+ continue
+ if not self.silent:
+ mlog.log('Found pkg-config:', mlog.bold(potential_pkgbin.get_path()),
+ '(%s)' % version_if_ok)
+ PkgConfigDependency.class_pkgbin[self.for_machine] = potential_pkgbin
+ break
+ else:
+ if not self.silent:
+ mlog.log('Found Pkg-config:', mlog.red('NO'))
+ # Set to False instead of None to signify that we've already
+ # searched for it and not found it
+ PkgConfigDependency.class_pkgbin[self.for_machine] = False
+
+ self.pkgbin = PkgConfigDependency.class_pkgbin[self.for_machine]
+ if self.pkgbin is False:
+ self.pkgbin = None
+ msg = 'Pkg-config binary for machine %s not found. Giving up.' % self.for_machine
+ if self.required:
+ raise DependencyException(msg)
+ else:
+ mlog.debug(msg)
+ return
+
+ assert isinstance(self.pkgbin, ExternalProgram)
+ mlog.debug('Determining dependency {!r} with pkg-config executable '
+ '{!r}'.format(name, self.pkgbin.get_path()))
+ ret, self.version, _ = self._call_pkgbin(['--modversion', name])
+ if ret != 0:
+ return
+
+ self.is_found = True
+
+ try:
+ # Fetch cargs to be used while using this dependency
+ self._set_cargs()
+ # Fetch the libraries and library paths needed for using this
+ self._set_libs()
+ except DependencyException as e:
+ mlog.debug(f"pkg-config error with '{name}': {e}")
+ if self.required:
+ raise
+ else:
+ self.compile_args = []
+ self.link_args = []
+ self.is_found = False
+ self.reason = e
+
+ def __repr__(self) -> str:
+ s = '<{0} {1}: {2} {3}>'
+ return s.format(self.__class__.__name__, self.name, self.is_found,
+ self.version_reqs)
+
+ def _call_pkgbin_real(self, args: T.List[str], env: T.Dict[str, str]) -> T.Tuple[int, str, str]:
+ assert isinstance(self.pkgbin, ExternalProgram)
+ cmd = self.pkgbin.get_command() + args
+ p, out, err = Popen_safe(cmd, env=env)
+ rc, out, err = p.returncode, out.strip(), err.strip()
+ call = ' '.join(cmd)
+ mlog.debug(f"Called `{call}` -> {rc}\n{out}")
+ return rc, out, err
+
+ @staticmethod
+ def setup_env(env: T.MutableMapping[str, str], environment: 'Environment', for_machine: MachineChoice,
+ extra_path: T.Optional[str] = None) -> None:
+ extra_paths: T.List[str] = environment.coredata.options[OptionKey('pkg_config_path', machine=for_machine)].value[:]
+ if extra_path and extra_path not in extra_paths:
+ extra_paths.append(extra_path)
+ sysroot = environment.properties[for_machine].get_sys_root()
+ if sysroot:
+ env['PKG_CONFIG_SYSROOT_DIR'] = sysroot
+ new_pkg_config_path = ':'.join([p for p in extra_paths])
+ env['PKG_CONFIG_PATH'] = new_pkg_config_path
+
+ pkg_config_libdir_prop = environment.properties[for_machine].get_pkg_config_libdir()
+ if pkg_config_libdir_prop:
+ new_pkg_config_libdir = ':'.join([p for p in pkg_config_libdir_prop])
+ env['PKG_CONFIG_LIBDIR'] = new_pkg_config_libdir
+ # Dump all PKG_CONFIG environment variables
+ for key, value in env.items():
+ if key.startswith('PKG_'):
+ mlog.debug(f'env[{key}]: {value}')
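A compact sketch of the environment that setup_env builds, assuming pkg_config_path=['/opt/foo/lib/pkgconfig'] and a cross file that sets sys_root; the helper mirrors the joins above and is purely illustrative:

    def sketch_setup_env(extra_paths, sysroot=None, libdirs=None):
        env = {}
        if sysroot:
            env['PKG_CONFIG_SYSROOT_DIR'] = sysroot
        env['PKG_CONFIG_PATH'] = ':'.join(extra_paths)
        if libdirs:
            env['PKG_CONFIG_LIBDIR'] = ':'.join(libdirs)
        return env

    assert sketch_setup_env(['/opt/foo/lib/pkgconfig'], '/sysroot') == {
        'PKG_CONFIG_SYSROOT_DIR': '/sysroot',
        'PKG_CONFIG_PATH': '/opt/foo/lib/pkgconfig',
    }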
+
+ def _call_pkgbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str, str]:
+ # Always copy the environment since we're going to modify it
+ # with pkg-config variables
+ if env is None:
+ env = os.environ.copy()
+ else:
+ env = env.copy()
+
+ assert isinstance(self.pkgbin, ExternalProgram)
+ PkgConfigDependency.setup_env(env, self.env, self.for_machine)
+
+ fenv = frozenset(env.items())
+ targs = tuple(args)
+ cache = PkgConfigDependency.pkgbin_cache
+ if (self.pkgbin, targs, fenv) not in cache:
+ cache[(self.pkgbin, targs, fenv)] = self._call_pkgbin_real(args, env)
+ return cache[(self.pkgbin, targs, fenv)]
+
+ def _convert_mingw_paths(self, args: T.List[str]) -> T.List[str]:
+ '''
+ Both MSVC and native Python on Windows cannot handle MinGW-esque /c/foo
+ paths so convert them to C:/foo. We cannot resolve other paths starting
+ with / like /home/foo so leave them as-is so that the user gets an
+ error/warning from the compiler/linker.
+ '''
+ if not self.env.machines.build.is_windows():
+ return args
+ converted = []
+ for arg in args:
+ pargs: T.Tuple[str, ...] = tuple()
+ # Library search path
+ if arg.startswith('-L/'):
+ pargs = PurePath(arg[2:]).parts
+ tmpl = '-L{}:/{}'
+ elif arg.startswith('-I/'):
+ pargs = PurePath(arg[2:]).parts
+ tmpl = '-I{}:/{}'
+ # Full path to library or .la file
+ elif arg.startswith('/'):
+ pargs = PurePath(arg).parts
+ tmpl = '{}:/{}'
+ elif arg.startswith(('-L', '-I')) or (len(arg) > 2 and arg[1] == ':'):
+ # clean out improper '\\ ' which comes from some Windows pkg-config files
+ arg = arg.replace('\\ ', ' ')
+ if len(pargs) > 1 and len(pargs[1]) == 1:
+ arg = tmpl.format(pargs[1], '/'.join(pargs[2:]))
+ converted.append(arg)
+ return converted
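Concretely, on a Windows build machine the conversion above behaves like this (expected values derived from the templates in the method):

    samples = {
        '-L/c/msys64/mingw64/lib': '-Lc:/msys64/mingw64/lib',
        '-I/c/deps/include': '-Ic:/deps/include',
        '/c/deps/libfoo.la': 'c:/deps/libfoo.la',
        '-L/home/foo': '-L/home/foo',  # not a drive path; left as-is
    }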
+
+ def _split_args(self, cmd: str) -> T.List[str]:
+ # pkg-config paths follow Unix conventions, even on Windows; split the
+ # output using shlex.split rather than mesonlib.split_args
+ return shlex.split(cmd)
+
+ def _set_cargs(self) -> None:
+ env = None
+ if self.language == 'fortran':
+ # gfortran doesn't appear to look in system paths for INCLUDE files,
+ # so don't allow pkg-config to suppress -I flags for system paths
+ env = os.environ.copy()
+ env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1'
+ ret, out, err = self._call_pkgbin(['--cflags', self.name], env=env)
+ if ret != 0:
+ raise DependencyException('Could not generate cargs for %s:\n%s\n' %
+ (self.name, err))
+ self.compile_args = self._convert_mingw_paths(self._split_args(out))
+
+ def _search_libs(self, out: str, out_raw: str) -> T.Tuple[T.List[str], T.List[str]]:
+ '''
+ @out: PKG_CONFIG_ALLOW_SYSTEM_LIBS=1 pkg-config --libs
+ @out_raw: pkg-config --libs
+
+ We always look for the file ourselves instead of depending on the
+ compiler to find it with -lfoo or foo.lib (if possible) because:
+ 1. We want to be able to select static or shared
+ 2. We need the full path of the library to calculate RPATH values
+ 3. De-dup of libraries is easier when we have absolute paths
+
+ Libraries that are provided by the toolchain or are not found by
+ find_library() will be added with -L -l pairs.
+ '''
+ # Library paths should be safe to de-dup
+ #
+ # First, figure out what library paths to use. Originally, we were
+ # doing this as part of the loop, but due to differences in the order
+ # of -L values between pkg-config and pkgconf, we need to do that as
+ # a separate step. See:
+ # https://github.com/mesonbuild/meson/issues/3951
+ # https://github.com/mesonbuild/meson/issues/4023
+ #
+ # Separate system and prefix paths, and ensure that prefix paths are
+ # always searched first.
+ prefix_libpaths: OrderedSet[str] = OrderedSet()
+ # We also store this raw_link_args on the object later
+ raw_link_args = self._convert_mingw_paths(self._split_args(out_raw))
+ for arg in raw_link_args:
+ if arg.startswith('-L') and not arg.startswith(('-L-l', '-L-L')):
+ path = arg[2:]
+ if not os.path.isabs(path):
+ # Resolve the path as a compiler in the build directory would
+ path = os.path.join(self.env.get_build_dir(), path)
+ prefix_libpaths.add(path)
+ # Library paths are not always ordered in a meaningful way
+ #
+ # Instead of relying on pkg-config or pkgconf to provide -L flags in a
+ # specific order, we reorder library paths ourselves, according to the
+ # order specified in PKG_CONFIG_PATH. See:
+ # https://github.com/mesonbuild/meson/issues/4271
+ #
+ # Only prefix_libpaths are reordered here because there should not be
+ # too many system_libpaths to cause library version issues.
+ pkg_config_path: T.List[str] = self.env.coredata.options[OptionKey('pkg_config_path', machine=self.for_machine)].value
+ pkg_config_path = self._convert_mingw_paths(pkg_config_path)
+ prefix_libpaths = OrderedSet(sort_libpaths(list(prefix_libpaths), pkg_config_path))
+ system_libpaths: OrderedSet[str] = OrderedSet()
+ full_args = self._convert_mingw_paths(self._split_args(out))
+ for arg in full_args:
+ if arg.startswith(('-L-l', '-L-L')):
+ # These are D language arguments, not library paths
+ continue
+ if arg.startswith('-L') and arg[2:] not in prefix_libpaths:
+ system_libpaths.add(arg[2:])
+ # Use this re-ordered path list for library resolution
+ libpaths = list(prefix_libpaths) + list(system_libpaths)
+ # Track -lfoo libraries to avoid duplicate work
+ libs_found: OrderedSet[str] = OrderedSet()
+ # Track not-found libraries to know whether to add library paths
+ libs_notfound = []
+ libtype = LibType.STATIC if self.static else LibType.PREFER_SHARED
+ # Generate link arguments for this library
+ link_args = []
+ for lib in full_args:
+ if lib.startswith(('-L-l', '-L-L')):
+ # These are D language arguments, add them as-is
+ pass
+ elif lib.startswith('-L'):
+ # We already handled library paths above
+ continue
+ elif lib.startswith('-l:'):
+ # see: https://stackoverflow.com/questions/48532868/gcc-library-option-with-a-colon-llibevent-a
+ # also: see the documentation of -lnamespec | --library=namespec in the linker manual
+ # https://sourceware.org/binutils/docs-2.18/ld/Options.html
+
+ # Don't resolve the same -l:libfoo.a argument again
+ if lib in libs_found:
+ continue
+ libfilename = lib[3:]
+ foundname = None
+ for libdir in libpaths:
+ target = os.path.join(libdir, libfilename)
+ if os.path.exists(target):
+ foundname = target
+ break
+ if foundname is None:
+ if lib in libs_notfound:
+ continue
+ else:
+ mlog.warning('Library {!r} not found for dependency {!r}, may '
+ 'not be successfully linked'.format(libfilename, self.name))
+ libs_notfound.append(lib)
+ else:
+ lib = foundname
+ elif lib.startswith('-l'):
+ # Don't resolve the same -lfoo argument again
+ if lib in libs_found:
+ continue
+ if self.clib_compiler:
+ args = self.clib_compiler.find_library(lib[2:], self.env,
+ libpaths, libtype)
+ # If the project only uses a non-clib language such as D, Rust,
+ # C#, Python, etc, all we can do is limp along by adding the
+ # arguments as-is and then adding the libpaths at the end.
+ else:
+ args = None
+ if args is not None:
+ libs_found.add(lib)
+ # Replace -l arg with full path to library if available
+ # else, library is either to be ignored, or is provided by
+ # the compiler, can't be resolved, and should be used as-is
+ if args:
+ if not args[0].startswith('-l'):
+ lib = args[0]
+ else:
+ continue
+ else:
+ # Library wasn't found, maybe we're looking in the wrong
+ # places or the library will be provided with LDFLAGS or
+ # LIBRARY_PATH from the environment (on macOS), and many
+ # other edge cases that we can't account for.
+ #
+ # Add all -L paths and use it as -lfoo
+ if lib in libs_notfound:
+ continue
+ if self.static:
+ mlog.warning('Static library {!r} not found for dependency {!r}, may '
+ 'not be statically linked'.format(lib[2:], self.name))
+ libs_notfound.append(lib)
+ elif lib.endswith(".la"):
+ shared_libname = self.extract_libtool_shlib(lib)
+ shared_lib = os.path.join(os.path.dirname(lib), shared_libname)
+ if not os.path.exists(shared_lib):
+ shared_lib = os.path.join(os.path.dirname(lib), ".libs", shared_libname)
+
+ if not os.path.exists(shared_lib):
+ raise DependencyException('Got a libtool specific "%s" dependency'
+ ' but we could not compute the actual shared'
+ ' library path' % lib)
+ self.is_libtool = True
+ lib = shared_lib
+ if lib in link_args:
+ continue
+ link_args.append(lib)
+ # Add all -Lbar args if we have -lfoo args in link_args
+ if libs_notfound:
+ # Order of -L flags doesn't matter with ld, but it might with other
+ # linkers such as MSVC, so prepend them.
+ link_args = ['-L' + lp for lp in prefix_libpaths] + link_args
+ return link_args, raw_link_args
+
+ def _set_libs(self) -> None:
+ env = None
+ libcmd = ['--libs']
+
+ if self.static:
+ libcmd.append('--static')
+
+ libcmd.append(self.name)
+
+ # Force pkg-config to output -L fields even if they are system
+ # paths so we can do manual searching with cc.find_library() later.
+ env = os.environ.copy()
+ env['PKG_CONFIG_ALLOW_SYSTEM_LIBS'] = '1'
+ ret, out, err = self._call_pkgbin(libcmd, env=env)
+ if ret != 0:
+ raise DependencyException('Could not generate libs for %s:\n%s\n' %
+ (self.name, err))
+ # Also get the 'raw' output without -Lfoo system paths for adding -L
+ # args with -lfoo when a library can't be found, and also in
+ # gnome.generate_gir + gnome.gtkdoc which need -L -l arguments.
+ ret, out_raw, err_raw = self._call_pkgbin(libcmd)
+ if ret != 0:
+ raise DependencyException('Could not generate libs for %s:\n\n%s' %
+ (self.name, out_raw))
+ self.link_args, self.raw_link_args = self._search_libs(out, out_raw)
+
+ def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Union[str, T.List[str]]]) -> str:
+ options = ['--variable=' + variable_name, self.name]
+
+ if 'define_variable' in kwargs:
+ definition = kwargs.get('define_variable', [])
+ if not isinstance(definition, list):
+ raise DependencyException('define_variable takes a list')
+
+ if len(definition) != 2 or not all(isinstance(i, str) for i in definition):
+ raise DependencyException('define_variable must be made up of 2 strings for VARIABLENAME and VARIABLEVALUE')
+
+ options = ['--define-variable=' + '='.join(definition)] + options
+
+ ret, out, err = self._call_pkgbin(options)
+ variable = ''
+ if ret != 0:
+ if self.required:
+ raise DependencyException('dependency %s not found:\n%s\n' %
+ (self.name, err))
+ else:
+ variable = out.strip()
+
+ # pkg-config doesn't distinguish between empty and non-existent variables;
+ # use the variable list to check for variable existence.
+ if not variable:
+ ret, out, _ = self._call_pkgbin(['--print-variables', self.name])
+ if not re.search(r'^' + variable_name + r'$', out, re.MULTILINE):
+ if 'default' in kwargs:
+ assert isinstance(kwargs['default'], str)
+ variable = kwargs['default']
+ else:
+ mlog.warning(f"pkgconfig variable '{variable_name}' not defined for dependency {self.name}.")
+
+ mlog.debug(f'Got pkgconfig variable {variable_name} : {variable}')
+ return variable
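Usage, for reference: both calls go through the method above, and the define_variable form maps onto pkg-config's --define-variable flag. The package and variable names here are only examples:

    # dep: a found PkgConfigDependency (assumed to exist in this sketch)
    incdir = dep.get_pkgconfig_variable('includedir', {})
    girdir = dep.get_pkgconfig_variable(
        'girdir', {'define_variable': ['datadir', '/usr/share']})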
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.PKGCONFIG]
+
+ def check_pkgconfig(self, pkgbin: ExternalProgram) -> T.Optional[str]:
+ if not pkgbin.found():
+ mlog.log(f'Did not find pkg-config by name {pkgbin.name!r}')
+ return None
+ try:
+ p, out = Popen_safe(pkgbin.get_command() + ['--version'])[0:2]
+ if p.returncode != 0:
+ mlog.warning('Found pkg-config {!r} but it failed when run'
+ ''.format(' '.join(pkgbin.get_command())))
+ return None
+ except FileNotFoundError:
+ mlog.warning('We thought we found pkg-config {!r} but now it\'s not there. How odd!'
+ ''.format(' '.join(pkgbin.get_command())))
+ return None
+ except PermissionError:
+ msg = 'Found pkg-config {!r} but didn\'t have permissions to run it.'.format(' '.join(pkgbin.get_command()))
+ if not self.env.machines.build.is_windows():
+ msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.'
+ mlog.warning(msg)
+ return None
+ return out.strip()
+
+ def extract_field(self, la_file: str, fieldname: str) -> T.Optional[str]:
+ with open(la_file, encoding='utf-8') as f:
+ for line in f:
+ arr = line.strip().split('=')
+ if arr[0] == fieldname:
+ return arr[1][1:-1]
+ return None
+
+ def extract_dlname_field(self, la_file: str) -> T.Optional[str]:
+ return self.extract_field(la_file, 'dlname')
+
+ def extract_libdir_field(self, la_file: str) -> T.Optional[str]:
+ return self.extract_field(la_file, 'libdir')
+
+ def extract_libtool_shlib(self, la_file: str) -> T.Optional[str]:
+ '''
+ Returns the path to the shared library
+ corresponding to this .la file
+ '''
+ dlname = self.extract_dlname_field(la_file)
+ if dlname is None:
+ return None
+
+ # Darwin uses absolute paths where possible; since the libtool files never
+ # contain absolute paths, use the libdir field
+ if self.env.machines[self.for_machine].is_darwin():
+ dlbasename = os.path.basename(dlname)
+ libdir = self.extract_libdir_field(la_file)
+ if libdir is None:
+ return dlbasename
+ return os.path.join(libdir, dlbasename)
+ # From the comments in extract_libtool(), older libtools had
+ # a path rather than the raw dlname
+ return os.path.basename(dlname)
+
+ def log_tried(self) -> str:
+ return self.type_name
+
+ def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+ configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+ default_value: T.Optional[str] = None,
+ pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]:
+ if pkgconfig:
+ kwargs: T.Dict[str, T.Union[str, T.List[str]]] = {}
+ if default_value is not None:
+ kwargs['default'] = default_value
+ if pkgconfig_define is not None:
+ kwargs['define_variable'] = pkgconfig_define
+ try:
+ return self.get_pkgconfig_variable(pkgconfig, kwargs)
+ except DependencyException:
+ pass
+ if default_value is not None:
+ return default_value
+ raise DependencyException(f'Could not get pkg-config variable and no default provided for {self!r}')
diff --git a/meson/mesonbuild/dependencies/platform.py b/meson/mesonbuild/dependencies/platform.py
new file mode 100644
index 000000000..7759b0f93
--- /dev/null
+++ b/meson/mesonbuild/dependencies/platform.py
@@ -0,0 +1,58 @@
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies that are
+# platform-specific (generally speaking).
+
+from .base import DependencyTypeName, ExternalDependency, DependencyException
+from ..mesonlib import MesonException
+import typing as T
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+
+class AppleFrameworks(ExternalDependency):
+ def __init__(self, env: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+ super().__init__(DependencyTypeName('appleframeworks'), env, kwargs)
+ modules = kwargs.get('modules', [])
+ if isinstance(modules, str):
+ modules = [modules]
+ if not modules:
+ raise DependencyException("AppleFrameworks dependency requires at least one module.")
+ self.frameworks = modules
+ if not self.clib_compiler:
+ raise DependencyException('No C-like compilers are available, cannot find the framework')
+ self.is_found = True
+ for f in self.frameworks:
+ try:
+ args = self.clib_compiler.find_framework(f, env, [])
+ except MesonException as e:
+ if 'non-clang' in str(e):
+ self.is_found = False
+ self.link_args = []
+ self.compile_args = []
+ return
+ raise
+
+ if args is not None:
+ # No compile args are needed for system frameworks
+ self.link_args += args
+ else:
+ self.is_found = False
+
+ def log_info(self) -> str:
+ return ', '.join(self.frameworks)
+
+ def log_tried(self) -> str:
+ return 'framework'
diff --git a/meson/mesonbuild/dependencies/qt.py b/meson/mesonbuild/dependencies/qt.py
new file mode 100644
index 000000000..4eef71e33
--- /dev/null
+++ b/meson/mesonbuild/dependencies/qt.py
@@ -0,0 +1,438 @@
+# Copyright 2013-2017 The Meson development team
+# Copyright © 2021 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Dependency finders for the Qt framework."""
+
+import abc
+import re
+import os
+import typing as T
+
+from .base import DependencyException, DependencyMethods
+from .configtool import ConfigToolDependency
+from .framework import ExtraFrameworkDependency
+from .pkgconfig import PkgConfigDependency
+from .factory import DependencyFactory
+from .. import mlog
+from .. import mesonlib
+
+if T.TYPE_CHECKING:
+ from ..compilers import Compiler
+ from ..envconfig import MachineInfo
+ from ..environment import Environment
+
+
+def _qt_get_private_includes(mod_inc_dir: str, module: str, mod_version: str) -> T.List[str]:
+ # usually Qt5 puts private headers in /QT_INSTALL_HEADERS/module/VERSION/module/private
+ # except for at least QtWebkit and Enginio where the module version doesn't match Qt version
+ # as an example with Qt 5.10.1 on linux you would get:
+ # /usr/include/qt5/QtCore/5.10.1/QtCore/private/
+ # /usr/include/qt5/QtWidgets/5.10.1/QtWidgets/private/
+ # /usr/include/qt5/QtWebKit/5.212.0/QtWebKit/private/
+
+ # On Qt4, when available, the private folder is directly in the module
+ # folder, like /usr/include/QtCore/private/
+ if int(mod_version.split('.')[0]) < 5:
+ return []
+
+ private_dir = os.path.join(mod_inc_dir, mod_version)
+ # fallback, let's try to find a directory with the latest version
+ if not os.path.exists(private_dir):
+ dirs = [filename for filename in os.listdir(mod_inc_dir)
+ if os.path.isdir(os.path.join(mod_inc_dir, filename))]
+
+ for dirname in sorted(dirs, reverse=True):
+ if len(dirname.split('.')) == 3:
+ private_dir = os.path.join(mod_inc_dir, dirname)
+ break
+ return [private_dir, os.path.join(private_dir, 'Qt' + module)]
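For example, with mod_inc_dir='/usr/include/qt5/QtCore', module='Core' and mod_version='5.10.1' (and the directory present on disk), the function returns:

    ['/usr/include/qt5/QtCore/5.10.1',
     '/usr/include/qt5/QtCore/5.10.1/QtCore']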
+
+
+def get_qmake_host_bins(qvars: T.Dict[str, str]) -> str:
+ # Prefer QT_HOST_BINS (qt5, correct for cross and native compiling)
+ # but fall back to QT_INSTALL_BINS (qt4)
+ if 'QT_HOST_BINS' in qvars:
+ return qvars['QT_HOST_BINS']
+ return qvars['QT_INSTALL_BINS']
+
+
+def _get_modules_lib_suffix(version: str, info: 'MachineInfo', is_debug: bool) -> str:
+ """Get the module suffix based on platform and debug type."""
+ suffix = ''
+ if info.is_windows():
+ if is_debug:
+ suffix += 'd'
+ if version.startswith('4'):
+ suffix += '4'
+ if info.is_darwin():
+ if is_debug:
+ suffix += '_debug'
+ if mesonlib.version_compare(version, '>= 5.14.0'):
+ if info.is_android():
+ if info.cpu_family == 'x86':
+ suffix += '_x86'
+ elif info.cpu_family == 'x86_64':
+ suffix += '_x86_64'
+ elif info.cpu_family == 'arm':
+ suffix += '_armeabi-v7a'
+ elif info.cpu_family == 'aarch64':
+ suffix += '_arm64-v8a'
+ else:
+ mlog.warning(f'Android target arch "{info.cpu_family}" for Qt5 is unknown, '
+ 'module detection may not work')
+ return suffix
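The resulting suffixes, spelled out (each line follows one branch above):

    # windows, debug, Qt5        -> 'd'
    # windows, debug, Qt4        -> 'd4'
    # darwin, debug              -> '_debug'
    # android aarch64, Qt >=5.14 -> '_arm64-v8a'
    # everything else            -> ''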
+
+
+class QtExtraFrameworkDependency(ExtraFrameworkDependency):
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
+ super().__init__(name, env, kwargs, language=language)
+ self.mod_name = name[2:]
+
+ def get_compile_args(self, with_private_headers: bool = False, qt_version: str = "0") -> T.List[str]:
+ if self.found():
+ mod_inc_dir = os.path.join(self.framework_path, 'Headers')
+ args = ['-I' + mod_inc_dir]
+ if with_private_headers:
+ args += ['-I' + dirname for dirname in _qt_get_private_includes(mod_inc_dir, self.mod_name, qt_version)]
+ return args
+ return []
+
+
+class _QtBase:
+
+ """Mixin class for shared componenets between PkgConfig and Qmake."""
+
+ link_args: T.List[str]
+ clib_compiler: 'Compiler'
+ env: 'Environment'
+
+ def __init__(self, name: str, kwargs: T.Dict[str, T.Any]):
+ self.qtname = name.capitalize()
+ self.qtver = name[-1]
+ if self.qtver == "4":
+ self.qtpkgname = 'Qt'
+ else:
+ self.qtpkgname = self.qtname
+
+ self.private_headers = T.cast(bool, kwargs.get('private_headers', False))
+
+ self.requested_modules = mesonlib.stringlistify(mesonlib.extract_as_list(kwargs, 'modules'))
+ if not self.requested_modules:
+ raise DependencyException('No ' + self.qtname + ' modules specified.')
+
+ self.qtmain = T.cast(bool, kwargs.get('main', False))
+ if not isinstance(self.qtmain, bool):
+ raise DependencyException('"main" argument must be a boolean')
+
+ def _link_with_qtmain(self, is_debug: bool, libdir: T.Union[str, T.List[str]]) -> bool:
+ libdir = mesonlib.listify(libdir) # TODO: shouldn't be necessary
+ base_name = 'qtmaind' if is_debug else 'qtmain'
+ qtmain = self.clib_compiler.find_library(base_name, self.env, libdir)
+ if qtmain:
+ self.link_args.append(qtmain[0])
+ return True
+ return False
+
+ def get_exe_args(self, compiler: 'Compiler') -> T.List[str]:
+ # Originally this was -fPIE but nowadays the default
+ # for upstream and distros seems to be -reduce-relocations
+ # which requires -fPIC. This may cause a performance
+ # penalty when using self-built Qt or on platforms
+ # where -fPIC is not required. If this is an issue
+ # for you, patches are welcome.
+ return compiler.get_pic_args()
+
+ def log_details(self) -> str:
+ return f'modules: {", ".join(sorted(self.requested_modules))}'
+
+
+class QtPkgConfigDependency(_QtBase, PkgConfigDependency, metaclass=abc.ABCMeta):
+
+ """Specialization of the PkgConfigDependency for Qt."""
+
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+ _QtBase.__init__(self, name, kwargs)
+
+ # Always use QtCore as the "main" dependency, since it has the extra
+ # pkg-config variables that a user would expect to get. If "Core" is
+ # not a requested module, delete the compile and link arguments to
+ # avoid linking with something they didn't ask for
+ PkgConfigDependency.__init__(self, self.qtpkgname + 'Core', env, kwargs)
+ if 'Core' not in self.requested_modules:
+ self.compile_args = []
+ self.link_args = []
+
+ for m in self.requested_modules:
+ mod = PkgConfigDependency(self.qtpkgname + m, self.env, kwargs, language=self.language)
+ if not mod.found():
+ self.is_found = False
+ return
+ if self.private_headers:
+ qt_inc_dir = mod.get_pkgconfig_variable('includedir', {})
+ mod_private_dir = os.path.join(qt_inc_dir, 'Qt' + m)
+ if not os.path.isdir(mod_private_dir):
+ # At least some versions of homebrew don't seem to set this
+ # up correctly. /usr/local/opt/qt/include/Qt + m_name is a
+ # symlink to /usr/local/opt/qt/include, but the pkg-config
+ # file points to /usr/local/Cellar/qt/x.y.z/Headers/, and
+ # the Qt + m_name there is not a symlink, it's a file
+ mod_private_dir = qt_inc_dir
+ mod_private_inc = _qt_get_private_includes(mod_private_dir, m, mod.version)
+ for directory in mod_private_inc:
+ mod.compile_args.append('-I' + directory)
+ self._add_sub_dependency([lambda: mod])
+
+ if self.env.machines[self.for_machine].is_windows() and self.qtmain:
+ # Check if we link with debug binaries
+ debug_lib_name = self.qtpkgname + 'Core' + _get_modules_lib_suffix(self.version, self.env.machines[self.for_machine], True)
+ is_debug = False
+ for arg in self.get_link_args():
+ if arg == f'-l{debug_lib_name}' or arg.endswith(f'{debug_lib_name}.lib') or arg.endswith(f'{debug_lib_name}.a'):
+ is_debug = True
+ break
+ libdir = self.get_pkgconfig_variable('libdir', {})
+ if not self._link_with_qtmain(is_debug, libdir):
+ self.is_found = False
+ return
+
+ self.bindir = self.get_pkgconfig_host_bins(self)
+ if not self.bindir:
+ # If exec_prefix is not defined, the pkg-config file is broken
+ prefix = self.get_pkgconfig_variable('exec_prefix', {})
+ if prefix:
+ self.bindir = os.path.join(prefix, 'bin')
+
+ @staticmethod
+ @abc.abstractmethod
+ def get_pkgconfig_host_bins(core: PkgConfigDependency) -> T.Optional[str]:
+ pass
+
+ @abc.abstractmethod
+ def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+ pass
+
+ def log_info(self) -> str:
+ return 'pkg-config'
+
+
+class QmakeQtDependency(_QtBase, ConfigToolDependency, metaclass=abc.ABCMeta):
+
+ """Find Qt using Qmake as a config-tool."""
+
+ tool_name = 'qmake'
+ version_arg = '-v'
+
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+ _QtBase.__init__(self, name, kwargs)
+ self.tools = [f'qmake-{self.qtname}', 'qmake']
+
+ # Add additional constraints so that the Qt version is met, but preserve
+ # any version requirements the user has set as well. For example, if Qt5
+ # is requested, add ">= 5, < 6", but if the user has ">= 5.6", don't
+ # lose that.
+ kwargs = kwargs.copy()
+ _vers = mesonlib.listify(kwargs.get('version', []))
+ _vers.extend([f'>= {self.qtver}', f'< {int(self.qtver) + 1}'])
+ kwargs['version'] = _vers
+
+ ConfigToolDependency.__init__(self, name, env, kwargs)
+ if not self.found():
+ return
+
+ # Query library path, header path, and binary path
+ stdo = self.get_config_value(['-query'], 'args')
+ qvars: T.Dict[str, str] = {}
+ for line in stdo:
+ line = line.strip()
+ if line == '':
+ continue
+ k, v = line.split(':', 1)
+ qvars[k] = v
+ # Qt on macOS uses a framework, but Qt for iOS/tvOS does not
+ xspec = qvars.get('QMAKE_XSPEC', '')
+ if self.env.machines.host.is_darwin() and not any(s in xspec for s in ['ios', 'tvos']):
+ mlog.debug("Building for macOS, looking for framework")
+ self._framework_detect(qvars, self.requested_modules, kwargs)
+ # Sometimes Qt is built not as a framework (for instance, when using conan pkg manager)
+ # skip and fall back to normal procedure then
+ if self.is_found:
+ return
+ else:
+ mlog.debug("Building for macOS, couldn't find framework, falling back to library search")
+ incdir = qvars['QT_INSTALL_HEADERS']
+ self.compile_args.append('-I' + incdir)
+ libdir = qvars['QT_INSTALL_LIBS']
+ # Used by qt.compilers_detect()
+ self.bindir = get_qmake_host_bins(qvars)
+
+ # Use the buildtype by default, but look at the b_vscrt option if the
+ # compiler supports it.
+ is_debug = self.env.coredata.get_option(mesonlib.OptionKey('buildtype')) == 'debug'
+ if mesonlib.OptionKey('b_vscrt') in self.env.coredata.options:
+ if self.env.coredata.options[mesonlib.OptionKey('b_vscrt')].value in {'mdd', 'mtd'}:
+ is_debug = True
+ modules_lib_suffix = _get_modules_lib_suffix(self.version, self.env.machines[self.for_machine], is_debug)
+
+ for module in self.requested_modules:
+ mincdir = os.path.join(incdir, 'Qt' + module)
+ self.compile_args.append('-I' + mincdir)
+
+ if module == 'QuickTest':
+ define_base = 'QMLTEST'
+ elif module == 'Test':
+ define_base = 'TESTLIB'
+ else:
+ define_base = module.upper()
+ self.compile_args.append(f'-DQT_{define_base}_LIB')
+
+ if self.private_headers:
+ priv_inc = self.get_private_includes(mincdir, module)
+ for directory in priv_inc:
+ self.compile_args.append('-I' + directory)
+ libfiles = self.clib_compiler.find_library(
+ self.qtpkgname + module + modules_lib_suffix, self.env,
+ mesonlib.listify(libdir)) # TODO: shouldn't be necessary
+ if libfiles:
+ libfile = libfiles[0]
+ else:
+ mlog.log("Could not find:", module,
+ self.qtpkgname + module + modules_lib_suffix,
+ 'in', libdir)
+ self.is_found = False
+ break
+ self.link_args.append(libfile)
+
+ if self.env.machines[self.for_machine].is_windows() and self.qtmain:
+ if not self._link_with_qtmain(is_debug, libdir):
+ self.is_found = False
+
+ def _sanitize_version(self, version: str) -> str:
+ m = re.search(rf'({self.qtver}(\.\d+)+)', version)
+ if m:
+ return m.group(0).rstrip('.')
+ return version
+
+ @abc.abstractmethod
+ def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+ pass
+
+ def _framework_detect(self, qvars: T.Dict[str, str], modules: T.List[str], kwargs: T.Dict[str, T.Any]) -> None:
+ libdir = qvars['QT_INSTALL_LIBS']
+
+ # ExtraFrameworkDependency doesn't support any methods
+ fw_kwargs = kwargs.copy()
+ fw_kwargs.pop('method', None)
+ fw_kwargs['paths'] = [libdir]
+
+ for m in modules:
+ fname = 'Qt' + m
+ mlog.debug('Looking for qt framework ' + fname)
+ fwdep = QtExtraFrameworkDependency(fname, self.env, fw_kwargs, language=self.language)
+ if fwdep.found():
+ self.compile_args.append('-F' + libdir)
+ self.compile_args += fwdep.get_compile_args(with_private_headers=self.private_headers,
+ qt_version=self.version)
+ self.link_args += fwdep.get_link_args()
+ else:
+ self.is_found = False
+ break
+ else:
+ self.is_found = True
+ # Used by self.compilers_detect()
+ self.bindir = get_qmake_host_bins(qvars)
+
+ def log_info(self) -> str:
+ return 'qmake'
+
+
+class Qt4ConfigToolDependency(QmakeQtDependency):
+
+ def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+ return []
+
+
+class Qt5ConfigToolDependency(QmakeQtDependency):
+
+ def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+ return _qt_get_private_includes(mod_inc_dir, module, self.version)
+
+
+class Qt6ConfigToolDependency(QmakeQtDependency):
+
+ def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+ return _qt_get_private_includes(mod_inc_dir, module, self.version)
+
+
+class Qt4PkgConfigDependency(QtPkgConfigDependency):
+
+ @staticmethod
+ def get_pkgconfig_host_bins(core: PkgConfigDependency) -> T.Optional[str]:
+ # Only return one bins dir, because the tools are generally all in one
+ # directory for Qt4; in Qt5, they must all be in one directory. Return
+ # the first one found among the bin variables, in case one tool is not
+ # configured to be built.
+ applications = ['moc', 'uic', 'rcc', 'lupdate', 'lrelease']
+ for application in applications:
+ try:
+ return os.path.dirname(core.get_pkgconfig_variable('%s_location' % application, {}))
+ except mesonlib.MesonException:
+ pass
+ return None
+
+ def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+ return []
+
+
+class Qt5PkgConfigDependency(QtPkgConfigDependency):
+
+ @staticmethod
+ def get_pkgconfig_host_bins(core: PkgConfigDependency) -> str:
+ return core.get_pkgconfig_variable('host_bins', {})
+
+ def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+ return _qt_get_private_includes(mod_inc_dir, module, self.version)
+
+
+class Qt6PkgConfigDependency(QtPkgConfigDependency):
+
+ @staticmethod
+ def get_pkgconfig_host_bins(core: PkgConfigDependency) -> str:
+ return core.get_pkgconfig_variable('host_bins', {})
+
+ def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+ return _qt_get_private_includes(mod_inc_dir, module, self.version)
+
+
+qt4_factory = DependencyFactory(
+ 'qt4',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+ pkgconfig_class=Qt4PkgConfigDependency,
+ configtool_class=Qt4ConfigToolDependency,
+)
+
+qt5_factory = DependencyFactory(
+ 'qt5',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+ pkgconfig_class=Qt5PkgConfigDependency,
+ configtool_class=Qt5ConfigToolDependency,
+)
+
+qt6_factory = DependencyFactory(
+ 'qt6',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+ pkgconfig_class=Qt6PkgConfigDependency,
+ configtool_class=Qt6ConfigToolDependency,
+)
diff --git a/meson/mesonbuild/dependencies/scalapack.py b/meson/mesonbuild/dependencies/scalapack.py
new file mode 100644
index 000000000..707e69850
--- /dev/null
+++ b/meson/mesonbuild/dependencies/scalapack.py
@@ -0,0 +1,153 @@
+# Copyright 2013-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pathlib import Path
+import functools
+import os
+import typing as T
+
+from .base import DependencyMethods
+from .base import DependencyException
+from .cmake import CMakeDependency
+from .pkgconfig import PkgConfigDependency
+from .factory import factory_methods
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment, MachineChoice
+ from .factory import DependencyGenerator
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE})
+def scalapack_factory(env: 'Environment', for_machine: 'MachineChoice',
+ kwargs: T.Dict[str, T.Any],
+ methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+ candidates: T.List['DependencyGenerator'] = []
+
+ if DependencyMethods.PKGCONFIG in methods:
+ mkl = 'mkl-static-lp64-iomp' if kwargs.get('static', False) else 'mkl-dynamic-lp64-iomp'
+ candidates.append(functools.partial(
+ MKLPkgConfigDependency, mkl, env, kwargs))
+
+ for pkg in ['scalapack-openmpi', 'scalapack']:
+ candidates.append(functools.partial(
+ PkgConfigDependency, pkg, env, kwargs))
+
+ if DependencyMethods.CMAKE in methods:
+ candidates.append(functools.partial(
+ CMakeDependency, 'Scalapack', env, kwargs))
+
+ return candidates
+
+
+class MKLPkgConfigDependency(PkgConfigDependency):
+
+ """PkgConfigDependency for Intel MKL.
+
+ MKL's pkg-config is pretty much borked in every way. We need to apply a
+ bunch of fixups to make it work correctly.
+ """
+
+ def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+ language: T.Optional[str] = None):
+ _m = os.environ.get('MKLROOT')
+ self.__mklroot = Path(_m).resolve() if _m else None
+
+ # We need to call down into the normal super() method even if we don't
+ # find mklroot, otherwise we won't have all of the instance variables
+ # initialized that meson expects.
+ super().__init__(name, env, kwargs, language=language)
+
+ # Doesn't work with gcc on Windows, but does on Linux
+ if (not self.__mklroot or (env.machines[self.for_machine].is_windows()
+ and self.clib_compiler.id == 'gcc')):
+ self.is_found = False
+
+ # This can happen either because we're using GCC, we couldn't find the
+ # mklroot, or the pkg-config couldn't find it.
+ if not self.is_found:
+ return
+
+ assert self.version != '', 'This should not happen if we didn\'t return above'
+
+ if self.version == 'unknown':
+ # At least by 2020 the version is in the pkg-config, just not with
+ # the correct name
+ v = self.get_variable(pkgconfig='Version', default_value='')
+
+ if not v and self.__mklroot:
+ try:
+ v = (
+ self.__mklroot.as_posix()
+ .split('compilers_and_libraries_')[1]
+ .split('/', 1)[0]
+ )
+ except IndexError:
+ pass
+
+ if v:
+ assert isinstance(v, str)
+ self.version = v
+
+ def _set_libs(self) -> None:
+ super()._set_libs()
+
+ if self.env.machines[self.for_machine].is_windows():
+ suffix = '.lib'
+ elif self.static:
+ suffix = '.a'
+ else:
+ suffix = ''
+ libdir = self.__mklroot / 'lib/intel64'
+
+ if self.clib_compiler.id == 'gcc':
+ for i, a in enumerate(self.link_args):
+ # only replace in filename, not in directory names
+ dirname, basename = os.path.split(a)
+ if 'mkl_intel_lp64' in basename:
+ basename = basename.replace('intel', 'gf')
+ self.link_args[i] = '/' + os.path.join(dirname, basename)
+ # MKL pkg-config omits scalapack
+ # be sure "-L" and "-Wl" are first if present
+ i = 0
+ for j, a in enumerate(self.link_args):
+ if a.startswith(('-L', '-Wl')):
+ i = j + 1
+ elif j > 3:
+ break
+ if self.env.machines[self.for_machine].is_windows() or self.static:
+ self.link_args.insert(
+ i, str(libdir / ('mkl_scalapack_lp64' + suffix))
+ )
+ self.link_args.insert(
+ i + 1, str(libdir / ('mkl_blacs_intelmpi_lp64' + suffix))
+ )
+ else:
+ self.link_args.insert(i, '-lmkl_scalapack_lp64')
+ self.link_args.insert(i + 1, '-lmkl_blacs_intelmpi_lp64')
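To illustrate the insertion logic just above: for a dynamic Linux link line, the missing ScaLAPACK pieces land right after the last leading -L/-Wl flag (the expected value was traced by hand from the loop; the MKL path is an example):

    before = ['-L/opt/mkl/lib/intel64', '-lmkl_intel_lp64', '-lmkl_core']
    after = ['-L/opt/mkl/lib/intel64',
             '-lmkl_scalapack_lp64', '-lmkl_blacs_intelmpi_lp64',
             '-lmkl_intel_lp64', '-lmkl_core']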
+
+ def _set_cargs(self) -> None:
+ env = None
+ if self.language == 'fortran':
+ # gfortran doesn't appear to look in system paths for INCLUDE files,
+ # so don't allow pkg-config to suppress -I flags for system paths
+ env = os.environ.copy()
+ env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1'
+ ret, out, err = self._call_pkgbin([
+ '--cflags', self.name,
+ '--define-variable=prefix=' + self.__mklroot.as_posix()],
+ env=env)
+ if ret != 0:
+ raise DependencyException('Could not generate cargs for %s:\n%s\n' %
+ (self.name, err))
+ self.compile_args = self._convert_mingw_paths(self._split_args(out))
diff --git a/meson/mesonbuild/dependencies/ui.py b/meson/mesonbuild/dependencies/ui.py
new file mode 100644
index 000000000..f256a370c
--- /dev/null
+++ b/meson/mesonbuild/dependencies/ui.py
@@ -0,0 +1,277 @@
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies that
+# are UI-related.
+import os
+import subprocess
+import typing as T
+
+from .. import mlog
+from .. import mesonlib
+from ..mesonlib import (
+ Popen_safe, extract_as_list, version_compare_many
+)
+from ..environment import detect_cpu_family
+
+from .base import DependencyException, DependencyMethods, DependencyTypeName, SystemDependency
+from .configtool import ConfigToolDependency
+from .factory import DependencyFactory
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+
+
+class GLDependencySystem(SystemDependency):
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+ super().__init__(name, environment, kwargs)
+
+ if self.env.machines[self.for_machine].is_darwin():
+ self.is_found = True
+ # FIXME: Use AppleFrameworks dependency
+ self.link_args = ['-framework', 'OpenGL']
+ # FIXME: Detect version using self.clib_compiler
+ return
+ if self.env.machines[self.for_machine].is_windows():
+ self.is_found = True
+ # FIXME: Use self.clib_compiler.find_library()
+ self.link_args = ['-lopengl32']
+ # FIXME: Detect version using self.clib_compiler
+ return
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ if mesonlib.is_osx() or mesonlib.is_windows():
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]
+ else:
+ return [DependencyMethods.PKGCONFIG]
+
+ def log_tried(self) -> str:
+ return 'system'
+
+class GnuStepDependency(ConfigToolDependency):
+
+ tools = ['gnustep-config']
+ tool_name = 'gnustep-config'
+
+ def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+ super().__init__('gnustep', environment, kwargs, language='objc')
+ if not self.is_found:
+ return
+ self.modules = kwargs.get('modules', [])
+ self.compile_args = self.filter_args(
+ self.get_config_value(['--objc-flags'], 'compile_args'))
+ self.link_args = self.weird_filter(self.get_config_value(
+ ['--gui-libs' if 'gui' in self.modules else '--base-libs'],
+ 'link_args'))
+
+ def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0) -> T.Tuple[T.Optional[T.List[str]], T.Optional[str]]:
+ tool = [self.tools[0]]
+ try:
+ p, out = Popen_safe(tool + ['--help'])[:2]
+ except (FileNotFoundError, PermissionError):
+ return (None, None)
+ if p.returncode != returncode:
+ return (None, None)
+ self.config = tool
+ found_version = self.detect_version()
+ if versions and not version_compare_many(found_version, versions)[0]:
+ return (None, found_version)
+
+ return (tool, found_version)
+
+ @staticmethod
+ def weird_filter(elems: T.List[str]) -> T.List[str]:
+ """When building packages, the output of the enclosing Make is
+ sometimes mixed among the subprocess output. I have no idea why. As a
+ hack filter out everything that is not a flag.
+ """
+ return [e for e in elems if e.startswith('-')]
+
+ @staticmethod
+ def filter_args(args: T.List[str]) -> T.List[str]:
+ """gnustep-config returns a bunch of garbage args such as -O2 and so
+ on. Drop everything that is not needed.
+ """
+ result = []
+ for f in args:
+ if f.startswith('-D') \
+ or f.startswith('-f') \
+ or f.startswith('-I') \
+ or f == '-pthread' \
+ or (f.startswith('-W') and not f == '-Wall'):
+ result.append(f)
+ return result
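+ # For illustration (assumed input, not from the original source):
+ # filter_args(['-DGNUSTEP', '-O2', '-Wall', '-Wextra', '-I/usr/include/GNUstep'])
+ # returns ['-DGNUSTEP', '-Wextra', '-I/usr/include/GNUstep']: -O2 is not
+ # in the allow-list and -Wall is explicitly excluded above.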
+
+ def detect_version(self) -> str:
+ gmake = self.get_config_value(['--variable=GNUMAKE'], 'variable')[0]
+ makefile_dir = self.get_config_value(['--variable=GNUSTEP_MAKEFILES'], 'variable')[0]
+ # This Makefile has the GNUStep version set
+ base_make = os.path.join(makefile_dir, 'Additional', 'base.make')
+ # Print the Makefile variable passed as the argument. For instance, if
+ # you run the make target `print-SOME_VARIABLE`, this will print the
+ # value of the variable `SOME_VARIABLE`.
+ printver = "print-%:\n\t@echo '$($*)'"
+ env = os.environ.copy()
+ # See base.make to understand why this is set
+ env['FOUNDATION_LIB'] = 'gnu'
+ p, o, e = Popen_safe([gmake, '-f', '-', '-f', base_make,
+ 'print-GNUSTEP_BASE_VERSION'],
+ env=env, write=printver, stdin=subprocess.PIPE)
+ version = o.strip()
+ if not version:
+ mlog.debug("Couldn't detect GNUStep version, falling back to '1'")
+ # Fallback to setting some 1.x version
+ version = '1'
+ return version
+
+
+class SDL2DependencyConfigTool(ConfigToolDependency):
+
+ tools = ['sdl2-config']
+ tool_name = 'sdl2-config'
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ if mesonlib.is_osx():
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK]
+ else:
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL]
+
+
+class WxDependency(ConfigToolDependency):
+
+ tools = ['wx-config-3.0', 'wx-config', 'wx-config-gtk3']
+ tool_name = 'wx-config'
+
+ def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__('WxWidgets', environment, kwargs, language='cpp')
+ if not self.is_found:
+ return
+ self.requested_modules = self.get_requested(kwargs)
+
+ extra_args = []
+ if self.static:
+ extra_args.append('--static=yes')
+
+ # Check to make sure static is going to work
+ err = Popen_safe(self.config + extra_args)[2]
+ if 'No config found to match' in err:
+ mlog.debug('WxWidgets is missing static libraries.')
+ self.is_found = False
+ return
+
+ # wx-config also exposes --cflags, but since wxWidgets requires C++,
+ # using --cxxflags should be good enough, at least for now.
+ self.compile_args = self.get_config_value(['--cxxflags'] + extra_args + self.requested_modules, 'compile_args')
+ self.link_args = self.get_config_value(['--libs'] + extra_args + self.requested_modules, 'link_args')
+
+ @staticmethod
+ def get_requested(kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+ if 'modules' not in kwargs:
+ return []
+ candidates = extract_as_list(kwargs, 'modules')
+ for c in candidates:
+ if not isinstance(c, str):
+ raise DependencyException('wxwidgets module argument is not a string')
+ return candidates
+
+
+class VulkanDependencySystem(SystemDependency):
+
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+ super().__init__(name, environment, kwargs, language=language)
+
+ try:
+ self.vulkan_sdk = os.environ['VULKAN_SDK']
+ if not os.path.isabs(self.vulkan_sdk):
+ raise DependencyException('VULKAN_SDK must be an absolute path.')
+ except KeyError:
+ self.vulkan_sdk = None
+
+ if self.vulkan_sdk:
+ # TODO: this config might not work on some platforms, fix bugs as reported.
+ # We should at least detect other 64-bit platforms (e.g. armv8).
+ lib_name = 'vulkan'
+ lib_dir = 'lib'
+ inc_dir = 'include'
+ if mesonlib.is_windows():
+ lib_name = 'vulkan-1'
+ lib_dir = 'Lib32'
+ inc_dir = 'Include'
+ if detect_cpu_family(self.env.coredata.compilers.host) == 'x86_64':
+ lib_dir = 'Lib'
+
+ # make sure header and lib are valid
+ inc_path = os.path.join(self.vulkan_sdk, inc_dir)
+ header = os.path.join(inc_path, 'vulkan', 'vulkan.h')
+ lib_path = os.path.join(self.vulkan_sdk, lib_dir)
+ find_lib = self.clib_compiler.find_library(lib_name, environment, [lib_path])
+
+ if not find_lib:
+ raise DependencyException('VULKAN_SDK points to an invalid directory (no lib)')
+
+ if not os.path.isfile(header):
+ raise DependencyException('VULKAN_SDK points to an invalid directory (no include)')
+
+ # XXX: this is very odd, and may deserve being removed
+ self.type_name = DependencyTypeName('vulkan_sdk')
+ self.is_found = True
+ self.compile_args.append('-I' + inc_path)
+ self.link_args.append('-L' + lib_path)
+ self.link_args.append('-l' + lib_name)
+
+ # TODO: find a way to retrieve the version from the sdk?
+ # Usually it is a part of the path to it (but does not have to be)
+ return
+ else:
+ # simply try to guess it, usually works on linux
+ libs = self.clib_compiler.find_library('vulkan', environment, [])
+ if libs is not None and self.clib_compiler.has_header('vulkan/vulkan.h', '', environment, disable_cache=True)[0]:
+ self.is_found = True
+ for lib in libs:
+ self.link_args.append(lib)
+ return
+
+ @staticmethod
+ def get_methods() -> T.List[DependencyMethods]:
+ return [DependencyMethods.SYSTEM]
+
+ def log_tried(self) -> str:
+ return 'system'
+
+gl_factory = DependencyFactory(
+ 'gl',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+ system_class=GLDependencySystem,
+)
+
+sdl2_factory = DependencyFactory(
+ 'sdl2',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK],
+ configtool_class=SDL2DependencyConfigTool,
+)
+
+vulkan_factory = DependencyFactory(
+ 'vulkan',
+ [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+ system_class=VulkanDependencySystem,
+)
diff --git a/meson/mesonbuild/depfile.py b/meson/mesonbuild/depfile.py
new file mode 100644
index 000000000..62cbe8125
--- /dev/null
+++ b/meson/mesonbuild/depfile.py
@@ -0,0 +1,85 @@
+# Copyright 2019 Red Hat, Inc.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+
+def parse(lines):
+ rules = []
+ targets = []
+ deps = []
+ in_deps = False
+ out = ''
+ for line in lines:
+ if not line.endswith('\n'):
+ line += '\n'
+ escape = None
+ for c in line:
+ if escape:
+ if escape == '$' and c != '$':
+ out += '$'
+ if escape == '\\' and c == '\n':
+ continue
+ out += c
+ escape = None
+ continue
+ if c == '\\' or c == '$':
+ escape = c
+ continue
+ elif c in (' ', '\n'):
+ if out != '':
+ if in_deps:
+ deps.append(out)
+ else:
+ targets.append(out)
+ out = ''
+ if c == '\n':
+ rules.append((targets, deps))
+ targets = []
+ deps = []
+ in_deps = False
+ continue
+ elif c == ':':
+ targets.append(out)
+ out = ''
+ in_deps = True
+ continue
+ out += c
+ return rules
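+# A sketch of the expected behaviour (example input assumed, not from the
+# original source): parsing the depfile line
+#   out.o: foo.c foo.h
+# yields [(['out.o'], ['foo.c', 'foo.h'])]. A backslash escapes the following
+# character (a backslash-newline is a line continuation) and '$$' collapses
+# to a literal '$', per the loop above.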
+
+Target = collections.namedtuple('Target', ['deps'])
+
+class DepFile:
+ def __init__(self, lines):
+ rules = parse(lines)
+ depfile = {}
+ for (targets, deps) in rules:
+ for target in targets:
+ t = depfile.setdefault(target, Target(deps=set()))
+ for dep in deps:
+ t.deps.add(dep)
+ self.depfile = depfile
+
+ def get_all_dependencies(self, target, visited=None):
+ deps = set()
+ if not visited:
+ visited = set()
+ if target in visited:
+ return set()
+ visited.add(target)
+ target = self.depfile.get(target)
+ if not target:
+ return set()
+ deps.update(target.deps)
+ for dep in target.deps:
+ deps.update(self.get_all_dependencies(dep, visited))
+ return sorted(deps)
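+# Usage sketch (hypothetical depfile contents): given the rules 'a: b' and
+# 'b: c', DepFile(['a: b\n', 'b: c\n']).get_all_dependencies('a') returns
+# ['b', 'c'], walking the graph transitively while the 'visited' set guards
+# against cycles.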
diff --git a/meson/mesonbuild/envconfig.py b/meson/mesonbuild/envconfig.py
new file mode 100644
index 000000000..307aac30e
--- /dev/null
+++ b/meson/mesonbuild/envconfig.py
@@ -0,0 +1,425 @@
+# Copyright 2012-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import typing as T
+from enum import Enum
+
+from . import mesonlib
+from .mesonlib import EnvironmentException, HoldableObject
+from . import mlog
+from pathlib import Path
+
+
+# These classes contain all the data pulled from configuration files (native
+# and cross files currently), and also assist with reading environment
+# variables.
+#
+# At this time there isn't an ironclad difference between this and other
+# sources of state like `coredata`. But one rough guide is that much of what
+# is in `coredata` is the *output* of the configuration process: the final
+# decisions after tests. This, on the other hand, holds the *inputs*. The
+# config files are parsed, but otherwise minimally transformed. When more
+# complex fallbacks (environment detection) exist, they are defined elsewhere
+# as functions that construct instances of these classes.
+
+
+known_cpu_families = (
+ 'aarch64',
+ 'alpha',
+ 'arc',
+ 'arm',
+ 'avr',
+ 'c2000',
+ 'csky',
+ 'dspic',
+ 'e2k',
+ 'ia64',
+ 'loongarch64',
+ 'm68k',
+ 'microblaze',
+ 'mips',
+ 'mips64',
+ 'parisc',
+ 'pic24',
+ 'ppc',
+ 'ppc64',
+ 'riscv32',
+ 'riscv64',
+ 'rl78',
+ 'rx',
+ 's390',
+ 's390x',
+ 'sh4',
+ 'sparc',
+ 'sparc64',
+ 'wasm32',
+ 'wasm64',
+ 'x86',
+ 'x86_64',
+)
+
+# It would feel more natural to call this "64_BIT_CPU_FAMILIES", but
+# python identifiers cannot start with numbers
+CPU_FAMILIES_64_BIT = [
+ 'aarch64',
+ 'alpha',
+ 'ia64',
+ 'loongarch64',
+ 'mips64',
+ 'ppc64',
+ 'riscv64',
+ 's390x',
+ 'sparc64',
+ 'wasm64',
+ 'x86_64',
+]
+
+# Map from language identifiers to environment variables.
+ENV_VAR_PROG_MAP: T.Mapping[str, str] = {
+ # Compilers
+ 'c': 'CC',
+ 'cpp': 'CXX',
+ 'cs': 'CSC',
+ 'd': 'DC',
+ 'fortran': 'FC',
+ 'objc': 'OBJC',
+ 'objcpp': 'OBJCXX',
+ 'rust': 'RUSTC',
+ 'vala': 'VALAC',
+
+ # Linkers
+ 'c_ld': 'CC_LD',
+ 'cpp_ld': 'CXX_LD',
+ 'd_ld': 'DC_LD',
+ 'fortran_ld': 'FC_LD',
+ 'objc_ld': 'OBJC_LD',
+ 'objcpp_ld': 'OBJCXX_LD',
+ 'rust_ld': 'RUSTC_LD',
+
+ # Binutils
+ 'strip': 'STRIP',
+ 'ar': 'AR',
+ 'windres': 'WINDRES',
+
+ # Other tools
+ 'cmake': 'CMAKE',
+ 'qmake': 'QMAKE',
+ 'pkgconfig': 'PKG_CONFIG',
+ 'make': 'MAKE',
+}
+
+# Deprecated environment variables mapped from the new variable to the old one
+# Deprecated in 0.54.0
+DEPRECATED_ENV_PROG_MAP: T.Mapping[str, str] = {
+ 'd_ld': 'D_LD',
+ 'fortran_ld': 'F_LD',
+ 'rust_ld': 'RUST_LD',
+ 'objcpp_ld': 'OBJCPP_LD',
+}
+
+class CMakeSkipCompilerTest(Enum):
+ ALWAYS = 'always'
+ NEVER = 'never'
+ DEP_ONLY = 'dep_only'
+
+class Properties:
+ def __init__(
+ self,
+ properties: T.Optional[T.Dict[str, T.Optional[T.Union[str, bool, int, T.List[str]]]]] = None,
+ ):
+ self.properties = properties or {} # type: T.Dict[str, T.Optional[T.Union[str, bool, int, T.List[str]]]]
+
+ def has_stdlib(self, language: str) -> bool:
+ return language + '_stdlib' in self.properties
+
+ # Some of get_stdlib, get_root, get_sys_root are wider than is actually
+ # true, but without heterogeneous dict annotations it's not practical to
+ # narrow them
+ def get_stdlib(self, language: str) -> T.Union[str, T.List[str]]:
+ stdlib = self.properties[language + '_stdlib']
+ if isinstance(stdlib, str):
+ return stdlib
+ assert isinstance(stdlib, list)
+ for i in stdlib:
+ assert isinstance(i, str)
+ return stdlib
+
+ def get_root(self) -> T.Optional[str]:
+ root = self.properties.get('root', None)
+ assert root is None or isinstance(root, str)
+ return root
+
+ def get_sys_root(self) -> T.Optional[str]:
+ sys_root = self.properties.get('sys_root', None)
+ assert sys_root is None or isinstance(sys_root, str)
+ return sys_root
+
+ def get_pkg_config_libdir(self) -> T.Optional[T.List[str]]:
+ p = self.properties.get('pkg_config_libdir', None)
+ if p is None:
+ return p
+ res = mesonlib.listify(p)
+ for i in res:
+ assert isinstance(i, str)
+ return res
+
+ def get_cmake_defaults(self) -> bool:
+ if 'cmake_defaults' not in self.properties:
+ return True
+ res = self.properties['cmake_defaults']
+ assert isinstance(res, bool)
+ return res
+
+ def get_cmake_toolchain_file(self) -> T.Optional[Path]:
+ if 'cmake_toolchain_file' not in self.properties:
+ return None
+ raw = self.properties['cmake_toolchain_file']
+ assert isinstance(raw, str)
+ cmake_toolchain_file = Path(raw)
+ if not cmake_toolchain_file.is_absolute():
+ raise EnvironmentException(f'cmake_toolchain_file ({raw}) is not absolute')
+ return cmake_toolchain_file
+
+ def get_cmake_skip_compiler_test(self) -> CMakeSkipCompilerTest:
+ if 'cmake_skip_compiler_test' not in self.properties:
+ return CMakeSkipCompilerTest.DEP_ONLY
+ raw = self.properties['cmake_skip_compiler_test']
+ assert isinstance(raw, str)
+ try:
+ return CMakeSkipCompilerTest(raw)
+ except ValueError:
+ raise EnvironmentException(
+ '"{}" is not a valid value for cmake_skip_compiler_test. Supported values are {}'
+ .format(raw, [e.value for e in CMakeSkipCompilerTest]))
+
+ def get_cmake_use_exe_wrapper(self) -> bool:
+ if 'cmake_use_exe_wrapper' not in self.properties:
+ return True
+ res = self.properties['cmake_use_exe_wrapper']
+ assert isinstance(res, bool)
+ return res
+
+ def get_java_home(self) -> T.Optional[Path]:
+ value = T.cast(T.Optional[str], self.properties.get('java_home'))
+ return Path(value) if value else None
+
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, type(self)):
+ return self.properties == other.properties
+ return NotImplemented
+
+ # TODO consider removing so Properties is less freeform
+ def __getitem__(self, key: str) -> T.Optional[T.Union[str, bool, int, T.List[str]]]:
+ return self.properties[key]
+
+ # TODO consider removing so Properties is less freeform
+ def __contains__(self, item: T.Union[str, bool, int, T.List[str]]) -> bool:
+ return item in self.properties
+
+ # TODO consider removing, for same reasons as above
+ def get(self, key: str, default: T.Optional[T.Union[str, bool, int, T.List[str]]] = None) -> T.Optional[T.Union[str, bool, int, T.List[str]]]:
+ return self.properties.get(key, default)
+
+class MachineInfo(HoldableObject):
+ def __init__(self, system: str, cpu_family: str, cpu: str, endian: str):
+ self.system = system
+ self.cpu_family = cpu_family
+ self.cpu = cpu
+ self.endian = endian
+ self.is_64_bit = cpu_family in CPU_FAMILIES_64_BIT # type: bool
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, MachineInfo):
+ return NotImplemented
+ return \
+ self.system == other.system and \
+ self.cpu_family == other.cpu_family and \
+ self.cpu == other.cpu and \
+ self.endian == other.endian
+
+ def __ne__(self, other: object) -> bool:
+ if not isinstance(other, MachineInfo):
+ return NotImplemented
+ return not self.__eq__(other)
+
+ def __repr__(self) -> str:
+ return f'<MachineInfo: {self.system} {self.cpu_family} ({self.cpu})>'
+
+ @classmethod
+ def from_literal(cls, literal: T.Dict[str, str]) -> 'MachineInfo':
+ minimum_literal = {'cpu', 'cpu_family', 'endian', 'system'}
+ # issubset() catches a missing required key even when extra keys are present.
+ if not minimum_literal.issubset(literal):
+ raise EnvironmentException(
+ f'Machine info is currently {literal}\n' +
+ 'but is missing {}.'.format(minimum_literal - set(literal)))
+
+ cpu_family = literal['cpu_family']
+ if cpu_family not in known_cpu_families:
+ mlog.warning(f'Unknown CPU family {cpu_family}, please report this at https://github.com/mesonbuild/meson/issues/new')
+
+ endian = literal['endian']
+ if endian not in ('little', 'big'):
+ mlog.warning(f'Unknown endian {endian}')
+
+ return cls(literal['system'], cpu_family, literal['cpu'], endian)
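+ # Illustrative literal, as it would come from a cross file (values
+ # assumed): {'system': 'linux', 'cpu_family': 'arm',
+ # 'cpu': 'cortex-a9', 'endian': 'little'}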
+
+ def is_windows(self) -> bool:
+ """
+ Machine is windows?
+ """
+ return self.system == 'windows'
+
+ def is_cygwin(self) -> bool:
+ """
+ Machine is cygwin?
+ """
+ return self.system == 'cygwin'
+
+ def is_linux(self) -> bool:
+ """
+ Machine is linux?
+ """
+ return self.system == 'linux'
+
+ def is_darwin(self) -> bool:
+ """
+ Machine is Darwin (iOS/tvOS/OS X)?
+ """
+ return self.system in {'darwin', 'ios', 'tvos'}
+
+ def is_android(self) -> bool:
+ """
+ Machine is Android?
+ """
+ return self.system == 'android'
+
+ def is_haiku(self) -> bool:
+ """
+ Machine is Haiku?
+ """
+ return self.system == 'haiku'
+
+ def is_netbsd(self) -> bool:
+ """
+ Machine is NetBSD?
+ """
+ return self.system == 'netbsd'
+
+ def is_openbsd(self) -> bool:
+ """
+ Machine is OpenBSD?
+ """
+ return self.system == 'openbsd'
+
+ def is_dragonflybsd(self) -> bool:
+ """Machine is DragonflyBSD?"""
+ return self.system == 'dragonfly'
+
+ def is_freebsd(self) -> bool:
+ """Machine is FreeBSD?"""
+ return self.system == 'freebsd'
+
+ def is_sunos(self) -> bool:
+ """Machine is illumos or Solaris?"""
+ return self.system == 'sunos'
+
+ def is_hurd(self) -> bool:
+ """
+ Machine is GNU/Hurd?
+ """
+ return self.system == 'gnu'
+
+ def is_irix(self) -> bool:
+ """Machine is IRIX?"""
+ return self.system.startswith('irix')
+
+ # Various prefixes and suffixes for import libraries, shared libraries,
+ # static libraries, and executables.
+ # Versioning is added to these names in the backends as-needed.
+ def get_exe_suffix(self) -> str:
+ if self.is_windows() or self.is_cygwin():
+ return 'exe'
+ else:
+ return ''
+
+ def get_object_suffix(self) -> str:
+ if self.is_windows():
+ return 'obj'
+ else:
+ return 'o'
+
+ def libdir_layout_is_win(self) -> bool:
+ return self.is_windows() or self.is_cygwin()
+
+class BinaryTable:
+
+ def __init__(
+ self,
+ binaries: T.Optional[T.Dict[str, T.Union[str, T.List[str]]]] = None,
+ ):
+ self.binaries: T.Dict[str, T.List[str]] = {}
+ if binaries:
+ for name, command in binaries.items():
+ if not isinstance(command, (list, str)):
+ raise mesonlib.MesonException(
+ f'Invalid type {command!r} for entry {name!r} in cross file')
+ self.binaries[name] = mesonlib.listify(command)
+
+ @staticmethod
+ def detect_ccache() -> T.List[str]:
+ try:
+ subprocess.check_call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ except (OSError, subprocess.CalledProcessError):
+ return []
+ return ['ccache']
+
+ @classmethod
+ def parse_entry(cls, entry: T.Union[str, T.List[str]]) -> T.Tuple[T.List[str], T.List[str]]:
+ compiler = mesonlib.stringlistify(entry)
+ # Ensure ccache exists and remove it if it doesn't
+ if compiler[0] == 'ccache':
+ compiler = compiler[1:]
+ ccache = cls.detect_ccache()
+ else:
+ ccache = []
+ # Return value has to be a list of compiler 'choices'
+ return compiler, ccache
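+ # For example (assumed behaviour): parse_entry(['ccache', 'gcc']) returns
+ # (['gcc'], ['ccache']) when ccache is installed, and (['gcc'], []) when
+ # it is not.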
+
+ def lookup_entry(self, name: str) -> T.Optional[T.List[str]]:
+ """Lookup binary in cross/native file and fallback to environment.
+
+ Returns command with args as list if found, Returns `None` if nothing is
+ found.
+ """
+ command = self.binaries.get(name)
+ if not command:
+ return None
+ elif not command[0].strip():
+ return None
+ return command
+
+class CMakeVariables:
+ def __init__(self, variables: T.Optional[T.Dict[str, T.Any]] = None) -> None:
+ variables = variables or {}
+ self.variables = {} # type: T.Dict[str, T.List[str]]
+
+ for key, value in variables.items():
+ value = mesonlib.listify(value)
+ for i in value:
+ assert isinstance(i, str)
+ self.variables[key] = value
+
+ def get_variables(self) -> T.Dict[str, T.List[str]]:
+ return self.variables
diff --git a/meson/mesonbuild/environment.py b/meson/mesonbuild/environment.py
new file mode 100644
index 000000000..c7c53c645
--- /dev/null
+++ b/meson/mesonbuild/environment.py
@@ -0,0 +1,867 @@
+# Copyright 2012-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+import os, platform, re, sys, shutil
+import typing as T
+import collections
+
+from . import coredata
+from . import mesonlib
+from .mesonlib import (
+ MesonException, EnvironmentException, MachineChoice, Popen_safe, PerMachine,
+ PerMachineDefaultable, PerThreeMachineDefaultable, split_args, quote_arg, OptionKey,
+ search_version
+)
+from . import mlog
+from .programs import (
+ ExternalProgram, EmptyExternalProgram
+)
+
+from .envconfig import (
+ BinaryTable, MachineInfo, Properties, known_cpu_families, CMakeVariables,
+)
+from . import compilers
+from .compilers import (
+ Compiler,
+ is_assembly,
+ is_header,
+ is_library,
+ is_llvm_ir,
+ is_object,
+ is_source,
+)
+
+from functools import lru_cache
+from mesonbuild import envconfig
+
+if T.TYPE_CHECKING:
+ from configparser import ConfigParser
+
+ from .dependencies import ExternalProgram
+
+build_filename = 'meson.build'
+
+CompilersDict = T.Dict[str, Compiler]
+
+if T.TYPE_CHECKING:
+ import argparse
+
+
+def _get_env_var(for_machine: MachineChoice, is_cross: bool, var_name: str) -> T.Optional[str]:
+ """
+ Returns the value of the first env var from the candidates that is set.
+ """
+ candidates = PerMachine(
+ # The prefixed build version takes priority, but if we are native
+ # compiling we fall back on the unprefixed host version. This
+ # allows native builds to never need to worry about the 'BUILD_*'
+ # ones.
+ ([var_name + '_FOR_BUILD'] if is_cross else [var_name]),
+ # Always just the unprefixed host versions
+ [var_name]
+ )[for_machine]
+ for var in candidates:
+ value = os.environ.get(var)
+ if value is not None:
+ break
+ else:
+ formatted = ', '.join([f'{var!r}' for var in candidates])
+ mlog.debug(f'None of {formatted} are defined in the environment, not changing global flags.')
+ return None
+ mlog.debug(f'Using {var!r} from environment with value: {value!r}')
+ return value
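+# Illustrative behaviour: in a cross build, looking up 'CC' for the build
+# machine consults only CC_FOR_BUILD, while the host machine (or any native
+# build) consults plain CC.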
+
+
+def detect_gcovr(min_version='3.3', new_rootdir_version='4.2', log=False):
+ gcovr_exe = 'gcovr'
+ try:
+ p, found = Popen_safe([gcovr_exe, '--version'])[0:2]
+ except (FileNotFoundError, PermissionError):
+ # Doesn't exist in PATH or isn't executable
+ return None, None
+ found = search_version(found)
+ if p.returncode == 0 and mesonlib.version_compare(found, '>=' + min_version):
+ if log:
+ mlog.log('Found gcovr-{} at {}'.format(found, quote_arg(shutil.which(gcovr_exe))))
+ return gcovr_exe, mesonlib.version_compare(found, '>=' + new_rootdir_version)
+ return None, None
+
+def detect_llvm_cov():
+ tools = get_llvm_tool_names('llvm-cov')
+ for tool in tools:
+ if mesonlib.exe_exists([tool, '--version']):
+ return tool
+ return None
+
+def find_coverage_tools() -> T.Tuple[T.Optional[str], T.Optional[str], T.Optional[str], T.Optional[str], T.Optional[str]]:
+ gcovr_exe, gcovr_new_rootdir = detect_gcovr()
+
+ llvm_cov_exe = detect_llvm_cov()
+
+ lcov_exe = 'lcov'
+ genhtml_exe = 'genhtml'
+
+ if not mesonlib.exe_exists([lcov_exe, '--version']):
+ lcov_exe = None
+ if not mesonlib.exe_exists([genhtml_exe, '--version']):
+ genhtml_exe = None
+
+ return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe
+
+def detect_ninja(version: str = '1.8.2', log: bool = False) -> T.List[str]:
+ r = detect_ninja_command_and_version(version, log)
+ return r[0] if r else None
+
+def detect_ninja_command_and_version(version: str = '1.8.2', log: bool = False) -> T.Tuple[T.List[str], str]:
+ env_ninja = os.environ.get('NINJA', None)
+ for n in [env_ninja] if env_ninja else ['ninja', 'ninja-build', 'samu']:
+ prog = ExternalProgram(n, silent=True)
+ if not prog.found():
+ continue
+ try:
+ p, found = Popen_safe(prog.command + ['--version'])[0:2]
+ except (FileNotFoundError, PermissionError):
+ # Doesn't exist in PATH or isn't executable
+ continue
+ found = found.strip()
+ # Perhaps we should add a way for the caller to know the failure mode
+ # (not found or too old)
+ if p.returncode == 0 and mesonlib.version_compare(found, '>=' + version):
+ if log:
+ name = os.path.basename(n)
+ if name.endswith('-' + found):
+ name = name[0:-1 - len(found)]
+ if name == 'ninja-build':
+ name = 'ninja'
+ if name == 'samu':
+ name = 'samurai'
+ mlog.log('Found {}-{} at {}'.format(name, found,
+ ' '.join([quote_arg(x) for x in prog.command])))
+ return (prog.command, found)
+
+def get_llvm_tool_names(tool: str) -> T.List[str]:
+ # Ordered list of possible suffixes of LLVM executables to try. Start with
+ # base, then try newest back to oldest (3.5 is arbitrary), and finally the
+ # devel version. Please note that the development snapshot in Debian does
+ # not have a distinct name. Do not move it to the beginning of the list
+ # unless it becomes a stable release.
+ suffixes = [
+ '', # base (no suffix)
+ '-12', '12',
+ '-11', '11',
+ '-10', '10',
+ '-9', '90',
+ '-8', '80',
+ '-7', '70',
+ '-6.0', '60',
+ '-5.0', '50',
+ '-4.0', '40',
+ '-3.9', '39',
+ '-3.8', '38',
+ '-3.7', '37',
+ '-3.6', '36',
+ '-3.5', '35',
+ '-13', # Debian development snapshot
+ '-devel', # FreeBSD development snapshot
+ ]
+ names = []
+ for suffix in suffixes:
+ names.append(tool + suffix)
+ return names
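+# e.g. get_llvm_tool_names('llvm-cov') yields ['llvm-cov', 'llvm-cov-12',
+# 'llvm-cov12', ..., 'llvm-cov-devel'], preserving the priority order
+# described above.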
+
+def detect_scanbuild() -> T.List[str]:
+ """ Look for scan-build binary on build platform
+
+ First, if a SCANBUILD env variable has been provided, give it precedence
+ on all platforms.
+
+ For most platforms, scan-build is found if the PATH contains a binary
+ named "scan-build". However, some distributions' package managers
+ (e.g. FreeBSD's) don't install it under that name. For those, loop
+ through a list of candidates to see if one is available.
+
+ Return: a single-element list of the found scan-build binary ready to be
+ passed to Popen()
+ """
+ exelist = []
+ if 'SCANBUILD' in os.environ:
+ exelist = split_args(os.environ['SCANBUILD'])
+
+ else:
+ tools = get_llvm_tool_names('scan-build')
+ for tool in tools:
+ if shutil.which(tool) is not None:
+ exelist = [shutil.which(tool)]
+ break
+
+ if exelist:
+ tool = exelist[0]
+ if os.path.isfile(tool) and os.access(tool, os.X_OK):
+ return [tool]
+ return []
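+# Illustrative usage: setting SCANBUILD='scan-build-12' (or a full path,
+# possibly with arguments) overrides the PATH search; split_args handles
+# any quoting inside the variable.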
+
+def detect_clangformat() -> T.List[str]:
+ """ Look for clang-format binary on build platform
+
+ Do the same thing as detect_scanbuild to find clang-format, except it
+ currently does not check any environment variable.
+
+ Return: a single-element list of the found clang-format binary ready to be
+ passed to Popen()
+ """
+ tools = get_llvm_tool_names('clang-format')
+ for tool in tools:
+ path = shutil.which(tool)
+ if path is not None:
+ return [path]
+ return []
+
+def detect_native_windows_arch():
+ """
+ The architecture of Windows itself: x86, amd64 or arm64
+ """
+ # These env variables are always available. See:
+ # https://msdn.microsoft.com/en-us/library/aa384274(VS.85).aspx
+ # https://blogs.msdn.microsoft.com/david.wang/2006/03/27/howto-detect-process-bitness/
+ arch = os.environ.get('PROCESSOR_ARCHITEW6432', '').lower()
+ if not arch:
+ try:
+ # If this doesn't exist, something is messing with the environment
+ arch = os.environ['PROCESSOR_ARCHITECTURE'].lower()
+ except KeyError:
+ raise EnvironmentException('Unable to detect native OS architecture')
+ return arch
+
+def detect_windows_arch(compilers: CompilersDict) -> str:
+ """
+ Detecting the 'native' architecture of Windows is not a trivial task. We
+ cannot trust that the architecture that Python is built for is the 'native'
+ one because you can run 32-bit apps on 64-bit Windows using WOW64 and
+ people sometimes install 32-bit Python on 64-bit Windows.
+
+ We also can't rely on the architecture of the OS itself, since it's
+ perfectly normal to compile and run 32-bit applications on Windows as if
+ they were native applications. It's a terrible experience to require the
+ user to supply a cross-info file to compile 32-bit applications on 64-bit
+ Windows. Thankfully, the only way to compile things with Visual Studio on
+ Windows is by entering the 'msvc toolchain' environment, which can be
+ easily detected.
+
+ In the end, the sanest method is as follows:
+ 1. Check environment variables that are set by Windows and WOW64 to find out
+ if this is x86 (possibly in WOW64), if so use that as our 'native'
+ architecture.
+ 2. If the compiler toolchain target architecture is x86, use that as our
+ 'native' architecture.
+ 3. Otherwise, use the actual Windows architecture
+
+ """
+ os_arch = detect_native_windows_arch()
+ if os_arch == 'x86':
+ return os_arch
+ # If we're on 64-bit Windows, 32-bit apps can be compiled without
+ # cross-compilation. So if we're doing that, just set the native arch as
+ # 32-bit and pretend like we're running under WOW64. Else, return the
+ # actual Windows architecture that we deduced above.
+ for compiler in compilers.values():
+ if compiler.id == 'msvc' and (compiler.target == 'x86' or compiler.target == '80x86'):
+ return 'x86'
+ if compiler.id == 'clang-cl' and compiler.target == 'x86':
+ return 'x86'
+ if compiler.id == 'gcc' and compiler.has_builtin_define('__i386__'):
+ return 'x86'
+ return os_arch
+
+def any_compiler_has_define(compilers: CompilersDict, define):
+ for c in compilers.values():
+ try:
+ if c.has_builtin_define(define):
+ return True
+ except mesonlib.MesonException:
+ # Ignore compilers that do not support has_builtin_define.
+ pass
+ return False
+
+def detect_cpu_family(compilers: CompilersDict) -> str:
+ """
+ Python is inconsistent in its platform module.
+ It returns different values for the same cpu.
+ For x86 it might return 'x86', 'i686' or somesuch.
+ Do some canonicalization.
+ """
+ if mesonlib.is_windows():
+ trial = detect_windows_arch(compilers)
+ elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd() or mesonlib.is_qnx() or mesonlib.is_aix():
+ trial = platform.processor().lower()
+ else:
+ trial = platform.machine().lower()
+ if trial.startswith('i') and trial.endswith('86'):
+ trial = 'x86'
+ elif trial == 'bepc':
+ trial = 'x86'
+ elif trial == 'arm64':
+ trial = 'aarch64'
+ elif trial.startswith('aarch64'):
+ # This can be `aarch64_be`
+ trial = 'aarch64'
+ elif trial.startswith('arm') or trial.startswith('earm'):
+ trial = 'arm'
+ elif trial.startswith(('powerpc64', 'ppc64')):
+ trial = 'ppc64'
+ elif trial.startswith(('powerpc', 'ppc')) or trial in {'macppc', 'power macintosh'}:
+ trial = 'ppc'
+ elif trial in ('amd64', 'x64', 'i86pc'):
+ trial = 'x86_64'
+ elif trial in {'sun4u', 'sun4v'}:
+ trial = 'sparc64'
+ elif trial.startswith('mips'):
+ if '64' not in trial:
+ trial = 'mips'
+ else:
+ trial = 'mips64'
+ elif trial in {'ip30', 'ip35'}:
+ trial = 'mips64'
+
+ # On Linux (and maybe others) there can be any mixture of 32/64 bit code in
+ # the kernel, Python, system, 32-bit chroot on 64-bit host, etc. The only
+ # reliable way to know is to check the compiler defines.
+ if trial == 'x86_64':
+ if any_compiler_has_define(compilers, '__i386__'):
+ trial = 'x86'
+ elif trial == 'aarch64':
+ if any_compiler_has_define(compilers, '__arm__'):
+ trial = 'arm'
+ # Add more quirks here as bugs are reported. Keep in sync with detect_cpu()
+ # below.
+ elif trial == 'parisc64':
+ # ATM there is no 64 bit userland for PA-RISC. Thus always
+ # report it as 32 bit for simplicity.
+ trial = 'parisc'
+ elif trial == 'ppc':
+ # AIX always returns powerpc, check here for 64-bit
+ if any_compiler_has_define(compilers, '__64BIT__'):
+ trial = 'ppc64'
+
+ if trial not in known_cpu_families:
+ mlog.warning(f'Unknown CPU family {trial!r}, please report this at '
+ 'https://github.com/mesonbuild/meson/issues/new with the '
+ 'output of `uname -a` and `cat /proc/cpuinfo`')
+
+ return trial
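+# For instance, the platform values 'i686', 'x64' and 'aarch64_be' all
+# canonicalize to 'x86', 'x86_64' and 'aarch64' respectively, before the
+# compiler-define quirks above are applied.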
+
+def detect_cpu(compilers: CompilersDict) -> str:
+ if mesonlib.is_windows():
+ trial = detect_windows_arch(compilers)
+ elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd() or mesonlib.is_aix():
+ trial = platform.processor().lower()
+ else:
+ trial = platform.machine().lower()
+
+ if trial in ('amd64', 'x64', 'i86pc'):
+ trial = 'x86_64'
+ if trial == 'x86_64':
+ # Same check as above for cpu_family
+ if any_compiler_has_define(compilers, '__i386__'):
+ trial = 'i686' # All 64 bit cpus have at least this level of x86 support.
+ elif trial.startswith('aarch64'):
+ # Same check as above for cpu_family
+ if any_compiler_has_define(compilers, '__arm__'):
+ trial = 'arm'
+ else:
+ # for aarch64_be
+ trial = 'aarch64'
+ elif trial.startswith('earm'):
+ trial = 'arm'
+ elif trial == 'e2k':
+ # Make more precise CPU detection for Elbrus platform.
+ trial = platform.processor().lower()
+ elif trial.startswith('mips'):
+ if '64' not in trial:
+ trial = 'mips'
+ else:
+ trial = 'mips64'
+ elif trial == 'ppc':
+ # AIX always returns powerpc, check here for 64-bit
+ if any_compiler_has_define(compilers, '__64BIT__'):
+ trial = 'ppc64'
+
+ # Add more quirks here as bugs are reported. Keep in sync with
+ # detect_cpu_family() above.
+ return trial
+
+def detect_system() -> str:
+ if sys.platform == 'cygwin':
+ return 'cygwin'
+ return platform.system().lower()
+
+def detect_msys2_arch() -> T.Optional[str]:
+ return os.environ.get('MSYSTEM_CARCH', None)
+
+def detect_machine_info(compilers: T.Optional[CompilersDict] = None) -> MachineInfo:
+ """Detect the machine we're running on
+
+ If compilers are not provided, we cannot know as much. None out those
+ fields to avoid accidentally depending on partial knowledge. The
+ underlying 'detect_*' methods can be called to explicitly use the
+ partial information.
+ """
+ return MachineInfo(
+ detect_system(),
+ detect_cpu_family(compilers) if compilers is not None else None,
+ detect_cpu(compilers) if compilers is not None else None,
+ sys.byteorder)
+
+# TODO make this compare two `MachineInfo`s purely. How important is the
+# `detect_cpu_family({})` distinction? It is the one impediment to that.
+def machine_info_can_run(machine_info: MachineInfo):
+ """Whether we can run binaries for this machine on the current machine.
+
+ Can almost always run 32-bit binaries on 64-bit natively if the host
+ and build systems are the same. We don't pass any compilers to
+ detect_cpu_family() here because we always want to know the OS
+ architecture, not what the compiler environment tells us.
+ """
+ if machine_info.system != detect_system():
+ return False
+ true_build_cpu_family = detect_cpu_family({})
+ return \
+ (machine_info.cpu_family == true_build_cpu_family) or \
+ ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) or \
+ ((true_build_cpu_family == 'aarch64') and (machine_info.cpu_family == 'arm'))
+
+class Environment:
+ private_dir = 'meson-private'
+ log_dir = 'meson-logs'
+ info_dir = 'meson-info'
+
+ def __init__(self, source_dir: T.Optional[str], build_dir: T.Optional[str], options: 'argparse.Namespace') -> None:
+ self.source_dir = source_dir
+ self.build_dir = build_dir
+ # Do not try to create build directories when build_dir is None.
+ # This reduced mode is used by the --buildoptions introspector.
+ if build_dir is not None:
+ self.scratch_dir = os.path.join(build_dir, Environment.private_dir)
+ self.log_dir = os.path.join(build_dir, Environment.log_dir)
+ self.info_dir = os.path.join(build_dir, Environment.info_dir)
+ os.makedirs(self.scratch_dir, exist_ok=True)
+ os.makedirs(self.log_dir, exist_ok=True)
+ os.makedirs(self.info_dir, exist_ok=True)
+ try:
+ self.coredata = coredata.load(self.get_build_dir()) # type: coredata.CoreData
+ self.first_invocation = False
+ except FileNotFoundError:
+ self.create_new_coredata(options)
+ except coredata.MesonVersionMismatchException as e:
+ # This is routine, but tell the user the update happened
+ mlog.log('Regenerating configuration from scratch:', str(e))
+ coredata.read_cmd_line_file(self.build_dir, options)
+ self.create_new_coredata(options)
+ except MesonException as e:
+ # If we stored previous command line options, we can recover from
+ # a broken/outdated coredata.
+ if os.path.isfile(coredata.get_cmd_line_file(self.build_dir)):
+ mlog.warning('Regenerating configuration from scratch.')
+ mlog.log('Reason:', mlog.red(str(e)))
+ coredata.read_cmd_line_file(self.build_dir, options)
+ self.create_new_coredata(options)
+ else:
+ raise e
+ else:
+ # Just create a fresh coredata in this case
+ self.scratch_dir = ''
+ self.create_new_coredata(options)
+
+ ## locally bind some unfrozen configuration
+
+ # Stores machine infos. This is the only *three*-machine variable
+ # because we expose a target machine info to the user, even though
+ # Meson itself never cares about the target machine.
+ machines: PerThreeMachineDefaultable[MachineInfo] = PerThreeMachineDefaultable()
+
+ # Similar to coredata.compilers, but lower level in that there is no
+ # meta data, only names/paths.
+ binaries = PerMachineDefaultable() # type: PerMachineDefaultable[BinaryTable]
+
+ # Misc other properties about each machine.
+ properties = PerMachineDefaultable() # type: PerMachineDefaultable[Properties]
+
+ # CMake toolchain variables
+ cmakevars = PerMachineDefaultable() # type: PerMachineDefaultable[CMakeVariables]
+
+ ## Setup build machine defaults
+
+ # Will be fully initialized later, once the compilers are known.
+ machines.build = detect_machine_info()
+
+ # Just uses hard-coded defaults and environment variables. Might be
+ # overwritten by a native file.
+ binaries.build = BinaryTable()
+ properties.build = Properties()
+
+ # Options with the key parsed into an OptionKey type.
+ #
+ # Note that order matters because of 'buildtype': if it comes after the
+ # 'optimization' and 'debug' keys, it overrides them.
+ self.options: T.MutableMapping[OptionKey, T.Union[str, T.List[str]]] = collections.OrderedDict()
+
+ ## Read in native file(s) to override build machine configuration
+
+ if self.coredata.config_files is not None:
+ config = coredata.parse_machine_files(self.coredata.config_files)
+ binaries.build = BinaryTable(config.get('binaries', {}))
+ properties.build = Properties(config.get('properties', {}))
+ cmakevars.build = CMakeVariables(config.get('cmake', {}))
+ self._load_machine_file_options(
+ config, properties.build,
+ MachineChoice.BUILD if self.coredata.cross_files else MachineChoice.HOST)
+
+ ## Read in cross file(s) to override host machine configuration
+
+ if self.coredata.cross_files:
+ config = coredata.parse_machine_files(self.coredata.cross_files)
+ properties.host = Properties(config.get('properties', {}))
+ binaries.host = BinaryTable(config.get('binaries', {}))
+ cmakevars.host = CMakeVariables(config.get('cmake', {}))
+ if 'host_machine' in config:
+ machines.host = MachineInfo.from_literal(config['host_machine'])
+ if 'target_machine' in config:
+ machines.target = MachineInfo.from_literal(config['target_machine'])
+ # Keep only per machine options from the native file. The cross
+ # file takes precedence over all other options.
+ for key, value in list(self.options.items()):
+ if self.coredata.is_per_machine_option(key):
+ self.options[key.as_build()] = value
+ self._load_machine_file_options(config, properties.host, MachineChoice.HOST)
+
+
+ ## "freeze" now initialized configuration, and "save" to the class.
+
+ self.machines = machines.default_missing()
+ self.binaries = binaries.default_missing()
+ self.properties = properties.default_missing()
+ self.cmakevars = cmakevars.default_missing()
+
+ # Command line options override those from cross/native files
+ self.options.update(options.cmd_line_options)
+
+ # Take default value from env if not set in cross/native files or command line.
+ self._set_default_options_from_env()
+ self._set_default_binaries_from_env()
+ self._set_default_properties_from_env()
+
+ # Warn if the user is using two different ways of setting build-type
+ # options that override each other
+ bt = OptionKey('buildtype')
+ db = OptionKey('debug')
+ op = OptionKey('optimization')
+ if bt in self.options and (db in self.options or op in self.options):
+ mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. '
+ 'Using both is redundant since they override each other. '
+ 'See: https://mesonbuild.com/Builtin-options.html#build-type-options')
+
+ exe_wrapper = self.lookup_binary_entry(MachineChoice.HOST, 'exe_wrapper')
+ if exe_wrapper is not None:
+ self.exe_wrapper = ExternalProgram.from_bin_list(self, MachineChoice.HOST, 'exe_wrapper')
+ else:
+ self.exe_wrapper = None
+
+ self.default_cmake = ['cmake']
+ self.default_pkgconfig = ['pkg-config']
+ self.wrap_resolver = None
+
+ def _load_machine_file_options(self, config: 'ConfigParser', properties: Properties, machine: MachineChoice) -> None:
+ """Read the contents of a Machine file and put it in the options store."""
+
+ # Look for any options in the deprecated paths section, warn about
+ # those, then assign them. They will be overwritten by the ones in the
+ # "built-in options" section if they're in both sections.
+ paths = config.get('paths')
+ if paths:
+ mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.')
+ for k, v in paths.items():
+ self.options[OptionKey.from_string(k).evolve(machine=machine)] = v
+
+ # Next look for compiler options in the "properties" section, this is
+ # also deprecated, and these will also be overwritten by the "built-in
+ # options" section. We need to remove these from this section, as well.
+ deprecated_properties: T.Set[str] = set()
+ for lang in compilers.all_languages:
+ deprecated_properties.add(lang + '_args')
+ deprecated_properties.add(lang + '_link_args')
+ for k, v in properties.properties.copy().items():
+ if k in deprecated_properties:
+ mlog.deprecation(f'{k} in the [properties] section of the machine file is deprecated, use the [built-in options] section.')
+ self.options[OptionKey.from_string(k).evolve(machine=machine)] = v
+ del properties.properties[k]
+
+ for section, values in config.items():
+ if ':' in section:
+ subproject, section = section.split(':')
+ else:
+ subproject = ''
+ if section == 'built-in options':
+ for k, v in values.items():
+ key = OptionKey.from_string(k)
+ # If we're in the cross file, and there is a `build.foo` warn about that. Later we'll remove it.
+ if machine is MachineChoice.HOST and key.machine is not machine:
+ mlog.deprecation('Setting build machine options in cross files, please use a native file instead, this will be removed in meson 0.60', once=True)
+ if key.subproject:
+ raise MesonException('Do not set subproject options in [built-in options] section, use [subproject:built-in options] instead.')
+ self.options[key.evolve(subproject=subproject, machine=machine)] = v
+ elif section == 'project options' and machine is MachineChoice.HOST:
+ # Project options are only for the host machine, we don't want
+ # to read these from the native file
+ for k, v in values.items():
+ # Project options are always for the host machine
+ key = OptionKey.from_string(k)
+ if key.subproject:
+ raise MesonException('Do not set subproject options in [built-in options] section, use [subproject:built-in options] instead.')
+ self.options[key.evolve(subproject=subproject)] = v
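+ # A sketch of the machine-file input handled above (section and option
+ # values illustrative):
+ #
+ # [built-in options]
+ # c_args = ['-O2']
+ #
+ # [zlib:built-in options]
+ # default_library = 'static'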
+
+ def _set_default_options_from_env(self) -> None:
+ opts: T.List[T.Tuple[str, str]] = (
+ [(v, f'{k}_args') for k, v in compilers.compilers.CFLAGS_MAPPING.items()] +
+ [
+ ('PKG_CONFIG_PATH', 'pkg_config_path'),
+ ('CMAKE_PREFIX_PATH', 'cmake_prefix_path'),
+ ('LDFLAGS', 'ldflags'),
+ ('CPPFLAGS', 'cppflags'),
+ ]
+ )
+
+ env_opts: T.DefaultDict[OptionKey, T.List[str]] = collections.defaultdict(list)
+
+ for (evar, keyname), for_machine in itertools.product(opts, MachineChoice):
+ p_env = _get_env_var(for_machine, self.is_cross_build(), evar)
+ if p_env is not None:
+ # these may contain duplicates, which must be removed, else
+ # a duplicates-in-array-option warning arises.
+ if keyname == 'cmake_prefix_path':
+ if self.machines[for_machine].is_windows():
+ # Cannot split on ':' on Windows because it's in the drive letter
+ _p_env = p_env.split(os.pathsep)
+ else:
+ # https://github.com/mesonbuild/meson/issues/7294
+ _p_env = re.split(r':|;', p_env)
+ p_list = list(mesonlib.OrderedSet(_p_env))
+ elif keyname == 'pkg_config_path':
+ p_list = list(mesonlib.OrderedSet(p_env.split(':')))
+ else:
+ p_list = split_args(p_env)
+ p_list = [e for e in p_list if e] # filter out any empty elements
+
+ # Take env vars only on first invocation; if the env changes when
+ # reconfiguring, it gets ignored.
+ # FIXME: We should remember if we took the value from env to warn
+ # if it changes on future invocations.
+ if self.first_invocation:
+ if keyname == 'ldflags':
+ key = OptionKey('link_args', machine=for_machine, lang='c') # needs a language to initialize properly
+ for lang in compilers.compilers.LANGUAGES_USING_LDFLAGS:
+ key = key.evolve(lang=lang)
+ env_opts[key].extend(p_list)
+ elif keyname == 'cppflags':
+ key = OptionKey('env_args', machine=for_machine, lang='c')
+ for lang in compilers.compilers.LANGUAGES_USING_CPPFLAGS:
+ key = key.evolve(lang=lang)
+ env_opts[key].extend(p_list)
+ else:
+ key = OptionKey.from_string(keyname).evolve(machine=for_machine)
+ if evar in compilers.compilers.CFLAGS_MAPPING.values():
+ # If this is an environment variable, we have to
+ # store it separately until the compiler is
+ # instantiated, as we don't know whether the
+ # compiler will want to use these arguments at link
+ # time and compile time (instead of just at compile
+ # time) until we're instantiating that `Compiler`
+ # object. This is required so that passing
+ # `-Dc_args=` on the command line and `$CFLAGS`
+ # have subtly different behavior. `$CFLAGS` will be
+ # added to the linker command line if the compiler
+ # acts as a linker driver, `-Dc_args` will not.
+ #
+ # We still use the original key as the base here, as
+ # we want to inherit the machine and the compiler
+ # language
+ key = key.evolve('env_args')
+ env_opts[key].extend(p_list)
+
+ # Only store options that are not already in self.options,
+ # otherwise we'd override the machine files
+ for k, v in env_opts.items():
+ if k not in self.options:
+ self.options[k] = v
+
+ def _set_default_binaries_from_env(self) -> None:
+ """Set default binaries from the environment.
+
+ For example, pkg-config can be set via PKG_CONFIG, or in the machine
+ file. We want to set the default to the env variable.
+ """
+ opts = itertools.chain(envconfig.DEPRECATED_ENV_PROG_MAP.items(),
+ envconfig.ENV_VAR_PROG_MAP.items())
+
+ for (name, evar), for_machine in itertools.product(opts, MachineChoice):
+ p_env = _get_env_var(for_machine, self.is_cross_build(), evar)
+ if p_env is not None:
+ self.binaries[for_machine].binaries.setdefault(name, mesonlib.split_args(p_env))
+
+ def _set_default_properties_from_env(self) -> None:
+ """Properties which can also be set from the environment."""
+ # name, evar, split
+ opts: T.List[T.Tuple[str, T.List[str], bool]] = [
+ ('boost_includedir', ['BOOST_INCLUDEDIR'], False),
+ ('boost_librarydir', ['BOOST_LIBRARYDIR'], False),
+ ('boost_root', ['BOOST_ROOT', 'BOOSTROOT'], True),
+ ('java_home', ['JAVA_HOME'], False),
+ ]
+
+ for (name, evars, split), for_machine in itertools.product(opts, MachineChoice):
+ for evar in evars:
+ p_env = _get_env_var(for_machine, self.is_cross_build(), evar)
+ if p_env is not None:
+ if split:
+ self.properties[for_machine].properties.setdefault(name, p_env.split(os.pathsep))
+ else:
+ self.properties[for_machine].properties.setdefault(name, p_env)
+ break
+
+ def create_new_coredata(self, options: 'argparse.Namespace') -> None:
+ # WARNING: Don't use any values from coredata in __init__. It gets
+ # re-initialized with project options by the interpreter during
+ # build file parsing.
+ # meson_command is used by the regenchecker script, which runs meson
+ self.coredata = coredata.CoreData(options, self.scratch_dir, mesonlib.get_meson_command())
+ self.first_invocation = True
+
+ def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
+ return self.coredata.is_cross_build(when_building_for)
+
+ def dump_coredata(self) -> str:
+ return coredata.save(self.coredata, self.get_build_dir())
+
+ def get_log_dir(self) -> str:
+ return self.log_dir
+
+ def get_coredata(self) -> coredata.CoreData:
+ return self.coredata
+
+ def get_build_command(self, unbuffered=False):
+ cmd = mesonlib.get_meson_command().copy()
+ if unbuffered and 'python' in os.path.basename(cmd[0]):
+ cmd.insert(1, '-u')
+ return cmd
+
+ def is_header(self, fname):
+ return is_header(fname)
+
+ def is_source(self, fname):
+ return is_source(fname)
+
+ def is_assembly(self, fname):
+ return is_assembly(fname)
+
+ def is_llvm_ir(self, fname):
+ return is_llvm_ir(fname)
+
+ def is_object(self, fname):
+ return is_object(fname)
+
+ @lru_cache(maxsize=None)
+ def is_library(self, fname):
+ return is_library(fname)
+
+ def lookup_binary_entry(self, for_machine: MachineChoice, name: str) -> T.Optional[T.List[str]]:
+ return self.binaries[for_machine].lookup_entry(name)
+
+ def get_scratch_dir(self) -> str:
+ return self.scratch_dir
+
+ def get_source_dir(self) -> str:
+ return self.source_dir
+
+ def get_build_dir(self) -> str:
+ return self.build_dir
+
+ def get_import_lib_dir(self) -> str:
+ "Install dir for the import library (library used for linking)"
+ return self.get_libdir()
+
+ def get_shared_module_dir(self) -> str:
+ "Install dir for shared modules that are loaded at runtime"
+ return self.get_libdir()
+
+ def get_shared_lib_dir(self) -> str:
+ "Install dir for the shared library"
+ m = self.machines.host
+ # Windows has no RPATH or similar, so DLLs must be next to EXEs.
+ if m.is_windows() or m.is_cygwin():
+ return self.get_bindir()
+ return self.get_libdir()
+
+ def get_static_lib_dir(self) -> str:
+ "Install dir for the static library"
+ return self.get_libdir()
+
+ def get_prefix(self) -> str:
+ return self.coredata.get_option(OptionKey('prefix'))
+
+ def get_libdir(self) -> str:
+ return self.coredata.get_option(OptionKey('libdir'))
+
+ def get_libexecdir(self) -> str:
+ return self.coredata.get_option(OptionKey('libexecdir'))
+
+ def get_bindir(self) -> str:
+ return self.coredata.get_option(OptionKey('bindir'))
+
+ def get_includedir(self) -> str:
+ return self.coredata.get_option(OptionKey('includedir'))
+
+ def get_mandir(self) -> str:
+ return self.coredata.get_option(OptionKey('mandir'))
+
+ def get_datadir(self) -> str:
+ return self.coredata.get_option(OptionKey('datadir'))
+
+ def get_compiler_system_dirs(self, for_machine: MachineChoice):
+ for comp in self.coredata.compilers[for_machine].values():
+ if isinstance(comp, compilers.ClangCompiler):
+ index = 1
+ break
+ elif isinstance(comp, compilers.GnuCompiler):
+ index = 2
+ break
+ else:
+ # This option is only supported by GCC and Clang. If we don't get a
+ # GCC or Clang compiler, return an empty list.
+ return []
+
+ p, out, _ = Popen_safe(comp.get_exelist() + ['-print-search-dirs'])
+ if p.returncode != 0:
+ raise mesonlib.MesonException('Could not calculate system search dirs')
+ out = out.split('\n')[index].lstrip('libraries: =').split(':')
+ return [os.path.normpath(p) for p in out]
+
+ def need_exe_wrapper(self, for_machine: MachineChoice = MachineChoice.HOST):
+ value = self.properties[for_machine].get('needs_exe_wrapper', None)
+ if value is not None:
+ return value
+ return not machine_info_can_run(self.machines[for_machine])
+
+ def get_exe_wrapper(self) -> ExternalProgram:
+ if not self.need_exe_wrapper():
+ return EmptyExternalProgram()
+ return self.exe_wrapper
diff --git a/meson/mesonbuild/interpreter/__init__.py b/meson/mesonbuild/interpreter/__init__.py
new file mode 100644
index 000000000..62b09bf37
--- /dev/null
+++ b/meson/mesonbuild/interpreter/__init__.py
@@ -0,0 +1,25 @@
+# SPDX-license-identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Meson interpreter."""
+
+from .interpreter import Interpreter, permitted_dependency_kwargs
+from .compiler import CompilerHolder
+from .interpreterobjects import (ExecutableHolder, BuildTargetHolder, CustomTargetHolder,
+ CustomTargetIndexHolder, MachineHolder, Test,
+ ConfigurationDataObject, SubprojectHolder, DependencyHolder,
+ GeneratedListHolder, ExternalProgramHolder,
+ extract_required_kwarg)
diff --git a/meson/mesonbuild/interpreter/compiler.py b/meson/mesonbuild/interpreter/compiler.py
new file mode 100644
index 000000000..b1eef2fe5
--- /dev/null
+++ b/meson/mesonbuild/interpreter/compiler.py
@@ -0,0 +1,785 @@
+import functools
+
+from ..interpreterbase.decorators import typed_kwargs, KwargInfo
+
+from .interpreterobjects import (extract_required_kwarg, extract_search_dirs)
+
+from .. import mesonlib
+from .. import mlog
+from .. import dependencies
+from ..interpreterbase import (ObjectHolder, noPosargs, noKwargs, permittedKwargs,
+ FeatureNew, FeatureNewKwargs, disablerIfNotFound,
+ check_stringlist, InterpreterException, InvalidArguments)
+
+import typing as T
+import os
+
+if T.TYPE_CHECKING:
+ from ..interpreter import Interpreter
+ from ..compilers import Compiler, RunResult
+
+class TryRunResultHolder(ObjectHolder['RunResult']):
+ def __init__(self, res: 'RunResult', interpreter: 'Interpreter'):
+ super().__init__(res, interpreter)
+ self.methods.update({'returncode': self.returncode_method,
+ 'compiled': self.compiled_method,
+ 'stdout': self.stdout_method,
+ 'stderr': self.stderr_method,
+ })
+
+ @noPosargs
+ @permittedKwargs({})
+ def returncode_method(self, args, kwargs):
+ return self.held_object.returncode
+
+ @noPosargs
+ @permittedKwargs({})
+ def compiled_method(self, args, kwargs):
+ return self.held_object.compiled
+
+ @noPosargs
+ @permittedKwargs({})
+ def stdout_method(self, args, kwargs):
+ return self.held_object.stdout
+
+ @noPosargs
+ @permittedKwargs({})
+ def stderr_method(self, args, kwargs):
+ return self.held_object.stderr
+
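+# Rough meson.build usage of the result object above (illustrative sketch):
+#   cc = meson.get_compiler('c')
+#   res = cc.run('int main(void) { return 0; }', name: 'trivial program')
+#   if res.compiled() and res.returncode() == 0
+#     message(res.stdout())
+#   endif
+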
+header_permitted_kwargs = {
+ 'required',
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+}
+
+find_library_permitted_kwargs = {
+ 'has_headers',
+ 'required',
+ 'dirs',
+ 'static',
+}
+
+find_library_permitted_kwargs |= {'header_' + k for k in header_permitted_kwargs}
+
+class CompilerHolder(ObjectHolder['Compiler']):
+ def __init__(self, compiler: 'Compiler', interpreter: 'Interpreter'):
+ super().__init__(compiler, interpreter)
+ self.environment = self.env
+ self.methods.update({'compiles': self.compiles_method,
+ 'links': self.links_method,
+ 'get_id': self.get_id_method,
+ 'get_linker_id': self.get_linker_id_method,
+ 'compute_int': self.compute_int_method,
+ 'sizeof': self.sizeof_method,
+ 'get_define': self.get_define_method,
+ 'check_header': self.check_header_method,
+ 'has_header': self.has_header_method,
+ 'has_header_symbol': self.has_header_symbol_method,
+ 'run': self.run_method,
+ 'has_function': self.has_function_method,
+ 'has_member': self.has_member_method,
+ 'has_members': self.has_members_method,
+ 'has_type': self.has_type_method,
+ 'alignment': self.alignment_method,
+ 'version': self.version_method,
+ 'cmd_array': self.cmd_array_method,
+ 'find_library': self.find_library_method,
+ 'has_argument': self.has_argument_method,
+ 'has_function_attribute': self.has_func_attribute_method,
+ 'get_supported_function_attributes': self.get_supported_function_attributes_method,
+ 'has_multi_arguments': self.has_multi_arguments_method,
+ 'get_supported_arguments': self.get_supported_arguments_method,
+ 'first_supported_argument': self.first_supported_argument_method,
+ 'has_link_argument': self.has_link_argument_method,
+ 'has_multi_link_arguments': self.has_multi_link_arguments_method,
+ 'get_supported_link_arguments': self.get_supported_link_arguments_method,
+ 'first_supported_link_argument': self.first_supported_link_argument_method,
+ 'unittest_args': self.unittest_args_method,
+ 'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method,
+ 'get_argument_syntax': self.get_argument_syntax_method,
+ })
+
+ @property
+ def compiler(self) -> 'Compiler':
+ return self.held_object
+
+ def _dep_msg(self, deps, endl):
+ msg_single = 'with dependency {}'
+ msg_many = 'with dependencies {}'
+ if not deps:
+ return endl
+ if endl is None:
+ endl = ''
+ names = []
+ for d in deps:
+ if isinstance(d, dependencies.InternalDependency):
+ continue
+ if isinstance(d, dependencies.ExternalLibrary):
+ name = '-l' + d.name
+ else:
+ name = d.name
+ names.append(name)
+ if not names:
+ return None
+ tpl = msg_many if len(names) > 1 else msg_single
+ return tpl.format(', '.join(names)) + endl
+
+ @noPosargs
+ @permittedKwargs({})
+ def version_method(self, args, kwargs):
+ return self.compiler.version
+
+ @noPosargs
+ @permittedKwargs({})
+ def cmd_array_method(self, args, kwargs):
+ return self.compiler.exelist
+
+ def determine_args(self, kwargs, mode='link'):
+ nobuiltins = kwargs.get('no_builtin_args', False)
+ if not isinstance(nobuiltins, bool):
+ raise InterpreterException('Type of no_builtin_args not a boolean.')
+ args = []
+ incdirs = mesonlib.extract_as_list(kwargs, 'include_directories')
+ from ..build import IncludeDirs
+ for i in incdirs:
+ if not isinstance(i, IncludeDirs):
+ raise InterpreterException('Include directories argument must be an include_directories object.')
+ for idir in i.to_string_list(self.environment.get_source_dir()):
+ args += self.compiler.get_include_args(idir, False)
+ if not nobuiltins:
+ opts = self.environment.coredata.options
+ args += self.compiler.get_option_compile_args(opts)
+ if mode == 'link':
+ args += self.compiler.get_option_link_args(opts)
+ args += mesonlib.stringlistify(kwargs.get('args', []))
+ return args
+
+ def determine_dependencies(self, kwargs, endl=':'):
+ deps = kwargs.get('dependencies', None)
+ if deps is not None:
+ final_deps = []
+ while deps:
+ next_deps = []
+ for d in mesonlib.listify(deps):
+ if not isinstance(d, dependencies.Dependency) or d.is_built():
+ raise InterpreterException('Dependencies must be external dependencies')
+ final_deps.append(d)
+ next_deps.extend(d.ext_deps)
+ deps = next_deps
+ deps = final_deps
+ return deps, self._dep_msg(deps, endl)
+
+ @permittedKwargs({
+ 'prefix',
+ 'args',
+ 'dependencies',
+ })
+ def alignment_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('Alignment method takes exactly one positional argument.')
+ check_stringlist(args)
+ typename = args[0]
+ prefix = kwargs.get('prefix', '')
+ if not isinstance(prefix, str):
+ raise InterpreterException('Prefix argument of alignment must be a string.')
+ extra_args = mesonlib.stringlistify(kwargs.get('args', []))
+ deps, msg = self.determine_dependencies(kwargs)
+ result = self.compiler.alignment(typename, prefix, self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ mlog.log('Checking for alignment of', mlog.bold(typename, True), msg, result)
+ return result
+
+ @permittedKwargs({
+ 'name',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
+ def run_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('Run method takes exactly one positional argument.')
+ code = args[0]
+ if isinstance(code, mesonlib.File):
+ code = mesonlib.File.from_absolute_file(
+ code.rel_to_builddir(self.environment.source_dir))
+ elif not isinstance(code, str):
+ raise InvalidArguments('Argument must be string or file.')
+ testname = kwargs.get('name', '')
+ if not isinstance(testname, str):
+ raise InterpreterException('Testname argument must be a string.')
+ extra_args = functools.partial(self.determine_args, kwargs)
+ deps, msg = self.determine_dependencies(kwargs, endl=None)
+ result = self.compiler.run(code, self.environment, extra_args=extra_args,
+ dependencies=deps)
+ if len(testname) > 0:
+ if not result.compiled:
+ h = mlog.red('DID NOT COMPILE')
+ elif result.returncode == 0:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red('NO (%d)' % result.returncode)
+ mlog.log('Checking if', mlog.bold(testname, True), msg, 'runs:', h)
+ return result
+
+ @noPosargs
+ @permittedKwargs({})
+ def get_id_method(self, args, kwargs):
+ return self.compiler.get_id()
+
+ @noPosargs
+ @permittedKwargs({})
+ @FeatureNew('compiler.get_linker_id', '0.53.0')
+ def get_linker_id_method(self, args, kwargs):
+ return self.compiler.get_linker_id()
+
+ @noPosargs
+ @permittedKwargs({})
+ def symbols_have_underscore_prefix_method(self, args, kwargs):
+ '''
+ Check if the compiler prefixes _ (underscore) to global C symbols
+ See: https://en.wikipedia.org/wiki/Name_mangling#C
+ '''
+ return self.compiler.symbols_have_underscore_prefix(self.environment)
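+ # e.g. in meson.build (illustrative):
+ #   prefix = cc.symbols_have_underscore_prefix() ? '_' : ''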
+
+ @noPosargs
+ @permittedKwargs({})
+ def unittest_args_method(self, args, kwargs):
+ '''
+ This function is deprecated and should not be used.
+ It may be removed in a future version of Meson.
+ '''
+ if not hasattr(self.compiler, 'get_feature_args'):
+ raise InterpreterException(f'This {self.compiler.get_display_language()} compiler has no feature arguments.')
+ build_to_src = os.path.relpath(self.environment.get_source_dir(), self.environment.get_build_dir())
+ return self.compiler.get_feature_args({'unittest': 'true'}, build_to_src)
+
+ @permittedKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
+ def has_member_method(self, args, kwargs):
+ if len(args) != 2:
+ raise InterpreterException('Has_member takes exactly two arguments.')
+ check_stringlist(args)
+ typename, membername = args
+ prefix = kwargs.get('prefix', '')
+ if not isinstance(prefix, str):
+ raise InterpreterException('Prefix argument of has_member must be a string.')
+ extra_args = functools.partial(self.determine_args, kwargs)
+ deps, msg = self.determine_dependencies(kwargs)
+ had, cached = self.compiler.has_members(typename, [membername], prefix,
+ self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ cached = mlog.blue('(cached)') if cached else ''
+ if had:
+ hadtxt = mlog.green('YES')
+ else:
+ hadtxt = mlog.red('NO')
+ mlog.log('Checking whether type', mlog.bold(typename, True),
+ 'has member', mlog.bold(membername, True), msg, hadtxt, cached)
+ return had
+
+ @permittedKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
+ def has_members_method(self, args, kwargs):
+ if len(args) < 2:
+ raise InterpreterException('Has_members needs at least two arguments.')
+ check_stringlist(args)
+ typename, *membernames = args
+ prefix = kwargs.get('prefix', '')
+ if not isinstance(prefix, str):
+ raise InterpreterException('Prefix argument of has_members must be a string.')
+ extra_args = functools.partial(self.determine_args, kwargs)
+ deps, msg = self.determine_dependencies(kwargs)
+ had, cached = self.compiler.has_members(typename, membernames, prefix,
+ self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ cached = mlog.blue('(cached)') if cached else ''
+ if had:
+ hadtxt = mlog.green('YES')
+ else:
+ hadtxt = mlog.red('NO')
+ members = mlog.bold(', '.join([f'"{m}"' for m in membernames]))
+ mlog.log('Checking whether type', mlog.bold(typename, True),
+ 'has members', members, msg, hadtxt, cached)
+ return had
+
+ @permittedKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
+ def has_function_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('Has_function takes exactly one argument.')
+ check_stringlist(args)
+ funcname = args[0]
+ prefix = kwargs.get('prefix', '')
+ if not isinstance(prefix, str):
+ raise InterpreterException('Prefix argument of has_function must be a string.')
+ extra_args = self.determine_args(kwargs)
+ deps, msg = self.determine_dependencies(kwargs)
+ had, cached = self.compiler.has_function(funcname, prefix, self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ cached = mlog.blue('(cached)') if cached else ''
+ if had:
+ hadtxt = mlog.green('YES')
+ else:
+ hadtxt = mlog.red('NO')
+ mlog.log('Checking for function', mlog.bold(funcname, True), msg, hadtxt, cached)
+ return had
+
+ @permittedKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
+ def has_type_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('Has_type takes exactly one argument.')
+ check_stringlist(args)
+ typename = args[0]
+ prefix = kwargs.get('prefix', '')
+ if not isinstance(prefix, str):
+ raise InterpreterException('Prefix argument of has_type must be a string.')
+ extra_args = functools.partial(self.determine_args, kwargs)
+ deps, msg = self.determine_dependencies(kwargs)
+ had, cached = self.compiler.has_type(typename, prefix, self.environment,
+ extra_args=extra_args, dependencies=deps)
+ cached = mlog.blue('(cached)') if cached else ''
+ if had:
+ hadtxt = mlog.green('YES')
+ else:
+ hadtxt = mlog.red('NO')
+ mlog.log('Checking for type', mlog.bold(typename, True), msg, hadtxt, cached)
+ return had
+
+ @FeatureNew('compiler.compute_int', '0.40.0')
+ @permittedKwargs({
+ 'prefix',
+ 'low',
+ 'high',
+ 'guess',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
+ def compute_int_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('Compute_int takes exactly one argument.')
+ check_stringlist(args)
+ expression = args[0]
+ prefix = kwargs.get('prefix', '')
+ low = kwargs.get('low', None)
+ high = kwargs.get('high', None)
+ guess = kwargs.get('guess', None)
+ if not isinstance(prefix, str):
+ raise InterpreterException('Prefix argument of compute_int must be a string.')
+ if low is not None and not isinstance(low, int):
+ raise InterpreterException('Low argument of compute_int must be an int.')
+ if high is not None and not isinstance(high, int):
+ raise InterpreterException('High argument of compute_int must be an int.')
+ if guess is not None and not isinstance(guess, int):
+ raise InterpreterException('Guess argument of compute_int must be an int.')
+ extra_args = functools.partial(self.determine_args, kwargs)
+ deps, msg = self.determine_dependencies(kwargs)
+ res = self.compiler.compute_int(expression, low, high, guess, prefix,
+ self.environment, extra_args=extra_args,
+ dependencies=deps)
+ mlog.log('Computing int of', mlog.bold(expression, True), msg, res)
+ return res
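+ # e.g. in meson.build (illustrative):
+ #   int_bits = cc.compute_int('sizeof(int) * 8', low: 8, high: 128)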
+
+ @permittedKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
+ def sizeof_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('Sizeof takes exactly one argument.')
+ check_stringlist(args)
+ element = args[0]
+ prefix = kwargs.get('prefix', '')
+ if not isinstance(prefix, str):
+ raise InterpreterException('Prefix argument of sizeof must be a string.')
+ extra_args = functools.partial(self.determine_args, kwargs)
+ deps, msg = self.determine_dependencies(kwargs)
+ esize = self.compiler.sizeof(element, prefix, self.environment,
+ extra_args=extra_args, dependencies=deps)
+ mlog.log('Checking for size of', mlog.bold(element, True), msg, esize)
+ return esize
+
+ @FeatureNew('compiler.get_define', '0.40.0')
+ @permittedKwargs({
+ 'prefix',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
+ def get_define_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('get_define() takes exactly one argument.')
+ check_stringlist(args)
+ element = args[0]
+ prefix = kwargs.get('prefix', '')
+ if not isinstance(prefix, str):
+ raise InterpreterException('Prefix argument of get_define() must be a string.')
+ extra_args = functools.partial(self.determine_args, kwargs)
+ deps, msg = self.determine_dependencies(kwargs)
+ value, cached = self.compiler.get_define(element, prefix, self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ cached = mlog.blue('(cached)') if cached else ''
+ mlog.log('Fetching value of define', mlog.bold(element, True), msg, value, cached)
+ return value
+
+ @permittedKwargs({
+ 'name',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
+ def compiles_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('compiles method takes exactly one argument.')
+ code = args[0]
+ if isinstance(code, mesonlib.File):
+ code = mesonlib.File.from_absolute_file(
+ code.rel_to_builddir(self.environment.source_dir))
+ elif not isinstance(code, str):
+ raise InvalidArguments('Argument must be string or file.')
+ testname = kwargs.get('name', '')
+ if not isinstance(testname, str):
+ raise InterpreterException('Testname argument must be a string.')
+ extra_args = functools.partial(self.determine_args, kwargs)
+ deps, msg = self.determine_dependencies(kwargs, endl=None)
+ result, cached = self.compiler.compiles(code, self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ if len(testname) > 0:
+ if result:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red('NO')
+ cached = mlog.blue('(cached)') if cached else ''
+ mlog.log('Checking if', mlog.bold(testname, True), msg, 'compiles:', h, cached)
+ return result
+
+ @permittedKwargs({
+ 'name',
+ 'no_builtin_args',
+ 'include_directories',
+ 'args',
+ 'dependencies',
+ })
+ def links_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('links method takes exactly one argument.')
+ code = args[0]
+ if isinstance(code, mesonlib.File):
+ code = mesonlib.File.from_absolute_file(
+ code.rel_to_builddir(self.environment.source_dir))
+ elif not isinstance(code, str):
+ raise InvalidArguments('Argument must be string or file.')
+ testname = kwargs.get('name', '')
+ if not isinstance(testname, str):
+ raise InterpreterException('Testname argument must be a string.')
+ extra_args = functools.partial(self.determine_args, kwargs)
+ deps, msg = self.determine_dependencies(kwargs, endl=None)
+ result, cached = self.compiler.links(code, self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ cached = mlog.blue('(cached)') if cached else ''
+ if len(testname) > 0:
+ if result:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red('NO')
+ mlog.log('Checking if', mlog.bold(testname, True), msg, 'links:', h, cached)
+ return result
+
+ @FeatureNew('compiler.check_header', '0.47.0')
+ @FeatureNewKwargs('compiler.check_header', '0.50.0', ['required'])
+ @permittedKwargs(header_permitted_kwargs)
+ def check_header_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('check_header method takes exactly one argument.')
+ check_stringlist(args)
+ hname = args[0]
+ prefix = kwargs.get('prefix', '')
+ if not isinstance(prefix, str):
+ raise InterpreterException('Prefix argument of has_header must be a string.')
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
+ if disabled:
+ mlog.log('Check usable header', mlog.bold(hname, True), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return False
+ extra_args = functools.partial(self.determine_args, kwargs)
+ deps, msg = self.determine_dependencies(kwargs)
+ haz, cached = self.compiler.check_header(hname, prefix, self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ cached = mlog.blue('(cached)') if cached else ''
+ if required and not haz:
+ raise InterpreterException(f'{self.compiler.get_display_language()} header {hname!r} not usable')
+ elif haz:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red('NO')
+ mlog.log('Check usable header', mlog.bold(hname, True), msg, h, cached)
+ return haz
+
+ @FeatureNewKwargs('compiler.has_header', '0.50.0', ['required'])
+ @permittedKwargs(header_permitted_kwargs)
+ def has_header_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('has_header method takes exactly one argument.')
+ check_stringlist(args)
+ hname = args[0]
+ prefix = kwargs.get('prefix', '')
+ if not isinstance(prefix, str):
+ raise InterpreterException('Prefix argument of has_header must be a string.')
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
+ if disabled:
+ mlog.log('Has header', mlog.bold(hname, True), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return False
+ extra_args = functools.partial(self.determine_args, kwargs)
+ deps, msg = self.determine_dependencies(kwargs)
+ haz, cached = self.compiler.has_header(hname, prefix, self.environment,
+ extra_args=extra_args, dependencies=deps)
+ cached = mlog.blue('(cached)') if cached else ''
+ if required and not haz:
+ raise InterpreterException(f'{self.compiler.get_display_language()} header {hname!r} not found')
+ elif haz:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red('NO')
+ mlog.log('Has header', mlog.bold(hname, True), msg, h, cached)
+ return haz
+
+ @FeatureNewKwargs('compiler.has_header_symbol', '0.50.0', ['required'])
+ @permittedKwargs(header_permitted_kwargs)
+ def has_header_symbol_method(self, args, kwargs):
+ if len(args) != 2:
+ raise InterpreterException('has_header_symbol method takes exactly two arguments.')
+ check_stringlist(args)
+ hname, symbol = args
+ prefix = kwargs.get('prefix', '')
+ if not isinstance(prefix, str):
+ raise InterpreterException('Prefix argument of has_header_symbol must be a string.')
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
+ if disabled:
+ mlog.log(f'Header <{hname}> has symbol', mlog.bold(symbol, True), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return False
+ extra_args = functools.partial(self.determine_args, kwargs)
+ deps, msg = self.determine_dependencies(kwargs)
+ haz, cached = self.compiler.has_header_symbol(hname, symbol, prefix, self.environment,
+ extra_args=extra_args,
+ dependencies=deps)
+ if required and not haz:
+ raise InterpreterException(f'{self.compiler.get_display_language()} symbol {symbol} not found in header {hname}')
+ elif haz:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red('NO')
+ cached = mlog.blue('(cached)') if cached else ''
+ mlog.log(f'Header <{hname}> has symbol', mlog.bold(symbol, True), msg, h, cached)
+ return haz
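+ # e.g. in meson.build (illustrative):
+ #   have_isdir = cc.has_header_symbol('sys/stat.h', 'S_ISDIR')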
+
+ def notfound_library(self, libname):
+ lib = dependencies.ExternalLibrary(libname, None,
+ self.environment,
+ self.compiler.language,
+ silent=True)
+ return lib
+
+ @FeatureNewKwargs('compiler.find_library', '0.51.0', ['static'])
+ @FeatureNewKwargs('compiler.find_library', '0.50.0', ['has_headers'])
+ @FeatureNewKwargs('compiler.find_library', '0.49.0', ['disabler'])
+ @disablerIfNotFound
+ @permittedKwargs(find_library_permitted_kwargs)
+ def find_library_method(self, args, kwargs):
+ # TODO add dependencies support?
+ if len(args) != 1:
+ raise InterpreterException('find_library method takes one argument.')
+ libname = args[0]
+ if not isinstance(libname, str):
+ raise InterpreterException('Library name not a string.')
+
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+ if disabled:
+ mlog.log('Library', mlog.bold(libname), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return self.notfound_library(libname)
+
+ has_header_kwargs = {k[7:]: v for k, v in kwargs.items() if k.startswith('header_')}
+ has_header_kwargs['required'] = required
+ headers = mesonlib.stringlistify(kwargs.get('has_headers', []))
+ for h in headers:
+ if not self.has_header_method([h], has_header_kwargs):
+ return self.notfound_library(libname)
+
+ search_dirs = extract_search_dirs(kwargs)
+
+ libtype = mesonlib.LibType.PREFER_SHARED
+ if 'static' in kwargs:
+ if not isinstance(kwargs['static'], bool):
+ raise InterpreterException('static must be a boolean')
+ libtype = mesonlib.LibType.STATIC if kwargs['static'] else mesonlib.LibType.SHARED
+ linkargs = self.compiler.find_library(libname, self.environment, search_dirs, libtype)
+ if required and not linkargs:
+ if libtype == mesonlib.LibType.PREFER_SHARED:
+ libtype = 'shared or static'
+ else:
+ libtype = libtype.name.lower()
+ raise InterpreterException('{} {} library {!r} not found'
+ .format(self.compiler.get_display_language(),
+ libtype, libname))
+ lib = dependencies.ExternalLibrary(libname, linkargs, self.environment,
+ self.compiler.language)
+ return lib
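+ # e.g. in meson.build (illustrative; the paths are placeholders):
+ #   zlib = cc.find_library('z', required: false, static: true,
+ #                          has_headers: ['zlib.h'], dirs: ['/opt/zlib/lib'])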
+
+ @permittedKwargs({})
+ def has_argument_method(self, args: T.Sequence[str], kwargs) -> bool:
+ args = mesonlib.stringlistify(args)
+ if len(args) != 1:
+ raise InterpreterException('has_argument takes exactly one argument.')
+ return self.has_multi_arguments_method(args, kwargs)
+
+ @permittedKwargs({})
+ def has_multi_arguments_method(self, args: T.Sequence[str], kwargs: dict):
+ args = mesonlib.stringlistify(args)
+ result, cached = self.compiler.has_multi_arguments(args, self.environment)
+ if result:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red('NO')
+ cached = mlog.blue('(cached)') if cached else ''
+ mlog.log(
+ 'Compiler for {} supports arguments {}:'.format(
+ self.compiler.get_display_language(), ' '.join(args)),
+ h, cached)
+ return result
+
+ @FeatureNew('compiler.get_supported_arguments', '0.43.0')
+ @typed_kwargs(
+ 'compiler.get_supported_arguments',
+ KwargInfo('checked', str, default='off', since='0.59.0',
+ validator=lambda s: 'must be one of "warn", "require" or "off"' if s not in ['warn', 'require', 'off'] else None)
+ )
+ def get_supported_arguments_method(self, args: T.Sequence[str], kwargs: T.Dict[str, T.Any]):
+ args = mesonlib.stringlistify(args)
+ supported_args = []
+ checked = kwargs.pop('checked')
+
+ for arg in args:
+ if not self.has_argument_method(arg, kwargs):
+ msg = f'Compiler for {self.compiler.get_display_language()} does not support "{arg}"'
+ if checked == 'warn':
+ mlog.warning(msg)
+ elif checked == 'require':
+ raise mesonlib.MesonException(msg)
+ else:
+ supported_args.append(arg)
+ return supported_args
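+ # e.g. in meson.build (illustrative; 'checked' needs 0.59.0):
+ #   add_project_arguments(
+ #     cc.get_supported_arguments('-Wall', '-Wmaybe-uninitialized', checked: 'warn'),
+ #     language: 'c')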
+
+ @permittedKwargs({})
+ def first_supported_argument_method(self, args: T.Sequence[str], kwargs: dict) -> T.List[str]:
+ for arg in mesonlib.stringlistify(args):
+ if self.has_argument_method(arg, kwargs):
+ mlog.log('First supported argument:', mlog.bold(arg))
+ return [arg]
+ mlog.log('First supported argument:', mlog.red('None'))
+ return []
+
+ @FeatureNew('compiler.has_link_argument', '0.46.0')
+ @permittedKwargs({})
+ def has_link_argument_method(self, args, kwargs):
+ args = mesonlib.stringlistify(args)
+ if len(args) != 1:
+ raise InterpreterException('has_link_argument takes exactly one argument.')
+ return self.has_multi_link_arguments_method(args, kwargs)
+
+ @FeatureNew('compiler.has_multi_link_arguments', '0.46.0')
+ @permittedKwargs({})
+ def has_multi_link_arguments_method(self, args, kwargs):
+ args = mesonlib.stringlistify(args)
+ result, cached = self.compiler.has_multi_link_arguments(args, self.environment)
+ cached = mlog.blue('(cached)') if cached else ''
+ if result:
+ h = mlog.green('YES')
+ else:
+ h = mlog.red('NO')
+ mlog.log(
+ 'Compiler for {} supports link arguments {}:'.format(
+ self.compiler.get_display_language(), ' '.join(args)),
+ h, cached)
+ return result
+
+ @FeatureNew('compiler.get_supported_link_arguments', '0.46.0')
+ @permittedKwargs({})
+ def get_supported_link_arguments_method(self, args, kwargs):
+ args = mesonlib.stringlistify(args)
+ supported_args = []
+ for arg in args:
+ if self.has_link_argument_method(arg, kwargs):
+ supported_args.append(arg)
+ return supported_args
+
+ @FeatureNew('compiler.first_supported_link_argument', '0.46.0')
+ @permittedKwargs({})
+ def first_supported_link_argument_method(self, args, kwargs):
+ for i in mesonlib.stringlistify(args):
+ if self.has_link_argument_method(i, kwargs):
+ mlog.log('First supported link argument:', mlog.bold(i))
+ return [i]
+ mlog.log('First supported link argument:', mlog.red('None'))
+ return []
+
+ @FeatureNew('compiler.has_function_attribute', '0.48.0')
+ @permittedKwargs({})
+ def has_func_attribute_method(self, args, kwargs):
+ args = mesonlib.stringlistify(args)
+ if len(args) != 1:
+ raise InterpreterException('has_func_attribute takes exactly one argument.')
+ result, cached = self.compiler.has_func_attribute(args[0], self.environment)
+ cached = mlog.blue('(cached)') if cached else ''
+ h = mlog.green('YES') if result else mlog.red('NO')
+ mlog.log('Compiler for {} supports function attribute {}:'.format(self.compiler.get_display_language(), args[0]), h, cached)
+ return result
+
+ @FeatureNew('compiler.get_supported_function_attributes', '0.48.0')
+ @permittedKwargs({})
+ def get_supported_function_attributes_method(self, args, kwargs):
+ args = mesonlib.stringlistify(args)
+ return [a for a in args if self.has_func_attribute_method(a, kwargs)]
+
+ @FeatureNew('compiler.get_argument_syntax', '0.49.0')
+ @noPosargs
+ @noKwargs
+ def get_argument_syntax_method(self, args, kwargs):
+ return self.compiler.get_argument_syntax()
diff --git a/meson/mesonbuild/interpreter/dependencyfallbacks.py b/meson/mesonbuild/interpreter/dependencyfallbacks.py
new file mode 100644
index 000000000..180209aa1
--- /dev/null
+++ b/meson/mesonbuild/interpreter/dependencyfallbacks.py
@@ -0,0 +1,351 @@
+from .interpreterobjects import SubprojectHolder, extract_required_kwarg
+
+from .. import mlog
+from .. import dependencies
+from .. import build
+from ..wrap import WrapMode
+from ..mesonlib import OptionKey, extract_as_list, stringlistify, version_compare_many
+from ..dependencies import Dependency, DependencyException, NotFoundDependency
+from ..interpreterbase import (MesonInterpreterObject, FeatureNew,
+ InterpreterException, InvalidArguments,
+ TYPE_nkwargs, TYPE_nvar)
+
+import typing as T
+if T.TYPE_CHECKING:
+ from .interpreter import Interpreter
+
+
+class DependencyFallbacksHolder(MesonInterpreterObject):
+ def __init__(self, interpreter: 'Interpreter', names: T.List[str], allow_fallback: T.Optional[bool] = None,
+ default_options: T.Optional[T.List[str]] = None) -> None:
+ super().__init__(subproject=interpreter.subproject)
+ self.interpreter = interpreter
+ self.subproject = interpreter.subproject
+ self.coredata = interpreter.coredata
+ self.build = interpreter.build
+ self.environment = interpreter.environment
+ self.wrap_resolver = interpreter.environment.wrap_resolver
+ self.allow_fallback = allow_fallback
+ self.subproject_name = None
+ self.subproject_varname = None
+ self.subproject_kwargs = {'default_options': default_options or []}
+ self.names: T.List[str] = []
+ for name in names:
+ if not name:
+ raise InterpreterException('dependency_fallbacks empty name \'\' is not allowed')
+ if '<' in name or '>' in name or '=' in name:
+ raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify\n'
+ 'version requirements use the \'version\' keyword argument instead.')
+ if name in self.names:
+ raise InterpreterException(f'dependency_fallbacks name {name!r} is duplicated')
+ self.names.append(name)
+
+ def set_fallback(self, fbinfo: T.Optional[T.Union[T.List[str], str]]) -> None:
+ # Legacy: This converts dependency()'s fallback kwargs.
+ if fbinfo is None:
+ return
+ if self.allow_fallback is not None:
+ raise InvalidArguments('"fallback" and "allow_fallback" arguments are mutually exclusive')
+ fbinfo = stringlistify(fbinfo)
+ if len(fbinfo) == 0:
+ # dependency('foo', fallback: []) is the same as dependency('foo', allow_fallback: false)
+ self.allow_fallback = False
+ return
+ if len(fbinfo) == 1:
+ FeatureNew.single_use('Fallback without variable name', '0.53.0', self.subproject)
+ subp_name, varname = fbinfo[0], None
+ elif len(fbinfo) == 2:
+ subp_name, varname = fbinfo
+ else:
+ raise InterpreterException('Fallback info must have one or two items.')
+ self._subproject_impl(subp_name, varname)
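+ # Legacy forms handled here, e.g.:
+ #   dependency('foo', fallback: ['foo_subp', 'foo_dep'])  # subproject + variable
+ #   dependency('foo', fallback: 'foo_subp')               # variable inferred (0.53.0+)
+ #   dependency('foo', fallback: [])                       # same as allow_fallback: false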
+
+ def _subproject_impl(self, subp_name: str, varname: T.Optional[str]) -> None:
+ if not varname:
+ # If no variable name is specified, check if the wrap file has one.
+ # If the wrap file has a variable name, prefer it, because the
+ # subproject most probably is not using meson.override_dependency().
+ for name in self.names:
+ varname = self.wrap_resolver.get_varname(subp_name, name)
+ if varname:
+ break
+ assert self.subproject_name is None
+ self.subproject_name = subp_name
+ self.subproject_varname = varname
+
+ def _do_dependency_cache(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+ name = func_args[0]
+ cached_dep = self._get_cached_dep(name, kwargs)
+ if cached_dep:
+ self._verify_fallback_consistency(cached_dep)
+ return cached_dep
+
+ def _do_dependency(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+ # Note that there is no df.dependency() method; this is called for names
+ # given as positional arguments to dependency_fallbacks(name1, ...).
+ # We use kwargs from the dependency() function, for things like version,
+ # module, etc.
+ name = func_args[0]
+ self._handle_featurenew_dependencies(name)
+ dep = dependencies.find_external_dependency(name, self.environment, kwargs)
+ if dep.found():
+ for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+ identifier = dependencies.get_dep_identifier(name, kwargs)
+ self.coredata.deps[for_machine].put(identifier, dep)
+ return dep
+ return None
+
+ def _do_existing_subproject(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+ subp_name = func_args[0]
+ varname = self.subproject_varname
+ if subp_name and self._get_subproject(subp_name):
+ return self._get_subproject_dep(subp_name, varname, kwargs)
+ return None
+
+ def _do_subproject(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+ if self.forcefallback:
+ mlog.log('Looking for a fallback subproject for the dependency',
+ mlog.bold(self.display_name), 'because:\nUse of fallback dependencies is forced.')
+ elif self.nofallback:
+ mlog.log('Not looking for a fallback subproject for the dependency',
+ mlog.bold(self.display_name), 'because:\nUse of fallback dependencies is disabled.')
+ return None
+ else:
+ mlog.log('Looking for a fallback subproject for the dependency',
+ mlog.bold(self.display_name))
+
+ # Configure the subproject
+ subp_name = self.subproject_name
+ varname = self.subproject_varname
+ self.interpreter.do_subproject(subp_name, 'meson', func_kwargs)
+ return self._get_subproject_dep(subp_name, varname, kwargs)
+
+ def _get_subproject(self, subp_name: str) -> T.Optional[SubprojectHolder]:
+ sub = self.interpreter.subprojects.get(subp_name)
+ if sub and sub.found():
+ return sub
+ return None
+
+ def _get_subproject_dep(self, subp_name: str, varname: str, kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+ # Verify the subproject is found
+ subproject = self._get_subproject(subp_name)
+ if not subproject:
+ mlog.log('Dependency', mlog.bold(self.display_name), 'from subproject',
+ mlog.bold(subp_name), 'found:', mlog.red('NO'),
+ mlog.blue('(subproject failed to configure)'))
+ return None
+
+ # The subproject has been configured. If for any reason the dependency
+ # cannot be found in this subproject, we have to return a not-found
+ # object instead of None, because we don't want to continue the lookup
+ # on the system.
+
+ # Check if the subproject overrode at least one of the names we got.
+ cached_dep = None
+ for name in self.names:
+ cached_dep = self._get_cached_dep(name, kwargs)
+ if cached_dep:
+ break
+
+ # If we have cached_dep we did all the checks and logging already in
+ # self._get_cached_dep().
+ if cached_dep:
+ self._verify_fallback_consistency(cached_dep)
+ return cached_dep
+
+ # Legacy: Use the variable name if provided instead of relying on the
+ # subproject to override one of our dependency names
+ if not varname:
+ mlog.warning(f'Subproject {subp_name!r} did not override {self.display_name!r} dependency and no variable name specified')
+ mlog.log('Dependency', mlog.bold(self.display_name), 'from subproject',
+ mlog.bold(subproject.subdir), 'found:', mlog.red('NO'))
+ return self._notfound_dependency()
+
+ var_dep = self._get_subproject_variable(subproject, varname) or self._notfound_dependency()
+ if not var_dep.found():
+ mlog.log('Dependency', mlog.bold(self.display_name), 'from subproject',
+ mlog.bold(subproject.subdir), 'found:', mlog.red('NO'))
+ return var_dep
+
+ wanted = stringlistify(kwargs.get('version', []))
+ found = var_dep.get_version()
+ if not self._check_version(wanted, found):
+ mlog.log('Dependency', mlog.bold(self.display_name), 'from subproject',
+ mlog.bold(subproject.subdir), 'found:', mlog.red('NO'),
+ 'found', mlog.normal_cyan(found), 'but need:',
+ mlog.bold(', '.join([f"'{e}'" for e in wanted])))
+ return self._notfound_dependency()
+
+ mlog.log('Dependency', mlog.bold(self.display_name), 'from subproject',
+ mlog.bold(subproject.subdir), 'found:', mlog.green('YES'),
+ mlog.normal_cyan(found) if found else None)
+ return var_dep
+
+ def _get_cached_dep(self, name: str, kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+ # Unlike other methods, this one returns a not-found dependency instead
+ # of None when the dependency is cached as not-found, or when the cached
+ # version does not match. In those cases we don't want to continue with
+ # other candidates.
+ for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+ identifier = dependencies.get_dep_identifier(name, kwargs)
+ wanted_vers = stringlistify(kwargs.get('version', []))
+
+ override = self.build.dependency_overrides[for_machine].get(identifier)
+ if override:
+ info = [mlog.blue('(overridden)' if override.explicit else '(cached)')]
+ cached_dep = override.dep
+ # We don't implicitly override not-found dependencies, but the user could
+ # have explicitly called meson.override_dependency() with a not-found
+ # dep.
+ if not cached_dep.found():
+ mlog.log('Dependency', mlog.bold(self.display_name),
+ 'found:', mlog.red('NO'), *info)
+ return cached_dep
+ else:
+ info = [mlog.blue('(cached)')]
+ cached_dep = self.coredata.deps[for_machine].get(identifier)
+
+ if cached_dep:
+ found_vers = cached_dep.get_version()
+ if not self._check_version(wanted_vers, found_vers):
+ mlog.log('Dependency', mlog.bold(name),
+ 'found:', mlog.red('NO'),
+ 'found', mlog.normal_cyan(found_vers), 'but need:',
+ mlog.bold(', '.join([f"'{e}'" for e in wanted_vers])),
+ *info)
+ return self._notfound_dependency()
+ if found_vers:
+ info = [mlog.normal_cyan(found_vers), *info]
+ mlog.log('Dependency', mlog.bold(self.display_name),
+ 'found:', mlog.green('YES'), *info)
+ return cached_dep
+ return None
+
+ def _get_subproject_variable(self, subproject: SubprojectHolder, varname: str) -> T.Optional[Dependency]:
+ try:
+ var_dep = subproject.get_variable_method([varname], {})
+ except InvalidArguments:
+ var_dep = None
+ if not isinstance(var_dep, Dependency):
+ mlog.warning(f'Variable {varname!r} in the subproject {subproject.subdir!r} is',
+ 'not found' if var_dep is None else 'not a dependency object')
+ return None
+ return var_dep
+
+ def _verify_fallback_consistency(self, cached_dep: Dependency):
+ subp_name = self.subproject_name
+ varname = self.subproject_varname
+ subproject = self._get_subproject(subp_name)
+ if subproject and varname:
+ var_dep = self._get_subproject_variable(subproject, varname)
+ if var_dep and cached_dep.found() and var_dep != cached_dep:
+ mlog.warning(f'Inconsistency: Subproject has overridden the dependency with another variable than {varname!r}')
+
+ def _handle_featurenew_dependencies(self, name: str) -> None:
+ 'Do a feature check on dependencies used by this subproject'
+ if name == 'mpi':
+ FeatureNew.single_use('MPI Dependency', '0.42.0', self.subproject)
+ elif name == 'pcap':
+ FeatureNew.single_use('Pcap Dependency', '0.42.0', self.subproject)
+ elif name == 'vulkan':
+ FeatureNew.single_use('Vulkan Dependency', '0.42.0', self.subproject)
+ elif name == 'libwmf':
+ FeatureNew.single_use('LibWMF Dependency', '0.44.0', self.subproject)
+ elif name == 'openmp':
+ FeatureNew.single_use('OpenMP Dependency', '0.46.0', self.subproject)
+
+ def _notfound_dependency(self) -> NotFoundDependency:
+ return NotFoundDependency(self.environment)
+
+ @staticmethod
+ def _check_version(wanted: T.Optional[T.List[str]], found: str) -> bool:
+ if not wanted:
+ return True
+ if found == 'undefined' or not version_compare_many(found, wanted)[0]:
+ return False
+ return True
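+ # e.g. _check_version(['>=1.2', '<2.0'], '1.4') is True, and an empty
+ # 'wanted' list always passes.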
+
+ def _get_candidates(self) -> T.List[T.Tuple[T.Callable[[TYPE_nkwargs, TYPE_nvar, TYPE_nkwargs], T.Optional[Dependency]], TYPE_nvar, TYPE_nkwargs]]:
+ candidates = []
+ # 1. check if any of the names is cached already.
+ for name in self.names:
+ candidates.append((self._do_dependency_cache, [name], {}))
+ # 2. check if the subproject fallback has already been configured.
+ if self.subproject_name:
+ candidates.append((self._do_existing_subproject, [self.subproject_name], self.subproject_kwargs))
+ # 3. check external dependency if we are not forced to use subproject
+ if not self.forcefallback or not self.subproject_name:
+ for name in self.names:
+ candidates.append((self._do_dependency, [name], {}))
+ # 4. configure the subproject
+ if self.subproject_name:
+ candidates.append((self._do_subproject, [self.subproject_name], self.subproject_kwargs))
+ return candidates
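+ # For example, dependency('foo', fallback: ['foo_subp', 'foo_dep']) yields,
+ # in order: the dependency cache for 'foo', the already-configured
+ # 'foo_subp' subproject, the external 'foo' lookup (skipped when the
+ # fallback is forced), and finally configuring the 'foo_subp' subproject.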
+
+ def lookup(self, kwargs: TYPE_nkwargs, force_fallback: bool = False) -> Dependency:
+ self.display_name = self.names[0] if self.names else '(anonymous)'
+ mods = extract_as_list(kwargs, 'modules')
+ if mods:
+ self.display_name += ' (modules: {})'.format(', '.join(str(i) for i in mods))
+
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+ if disabled:
+ mlog.log('Dependency', mlog.bold(self.display_name), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return self._notfound_dependency()
+
+ # Check if usage of the subproject fallback is forced
+ wrap_mode = self.coredata.get_option(OptionKey('wrap_mode'))
+ force_fallback_for = self.coredata.get_option(OptionKey('force_fallback_for'))
+ self.nofallback = wrap_mode == WrapMode.nofallback
+ self.forcefallback = (force_fallback or
+ wrap_mode == WrapMode.forcefallback or
+ any(name in force_fallback_for for name in self.names) or
+ self.subproject_name in force_fallback_for)
+
+ # Add an implicit subproject fallback if none has been set explicitly,
+ # unless implicit fallback is not allowed.
+ # Legacy: self.allow_fallback can be None when that kwarg is not defined
+ # in dependency('name'). In that case we don't want to use implicit
+ # fallback when required is false, because the user will typically fall
+ # back manually using cc.find_library(), for example.
+ if not self.subproject_name and self.allow_fallback is not False:
+ for name in self.names:
+ subp_name, varname = self.wrap_resolver.find_dep_provider(name)
+ if subp_name:
+ self.forcefallback |= subp_name in force_fallback_for
+ if self.forcefallback or self.allow_fallback is True or required or self._get_subproject(subp_name):
+ self._subproject_impl(subp_name, varname)
+ break
+
+ candidates = self._get_candidates()
+
+ # writing just "dependency('')" is an error, because it can only fail
+ if not candidates and required:
+ raise InvalidArguments('Dependency is required but has no candidates.')
+
+ # Try all candidates, only the last one is really required.
+ last = len(candidates) - 1
+ for i, item in enumerate(candidates):
+ func, func_args, func_kwargs = item
+ func_kwargs['required'] = required and (i == last)
+ kwargs['required'] = required and (i == last)
+ dep = func(kwargs, func_args, func_kwargs)
+ if dep and dep.found():
+ # Override this dependency to have consistent results in subsequent
+ # dependency lookups.
+ for name in self.names:
+ for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+ identifier = dependencies.get_dep_identifier(name, kwargs)
+ if identifier not in self.build.dependency_overrides[for_machine]:
+ self.build.dependency_overrides[for_machine][identifier] = \
+ build.DependencyOverride(dep, self.interpreter.current_node, explicit=False)
+ return dep
+ elif required and (dep or i == last):
+ # This was the last candidate or the dependency has been cached
+ # as not-found, or cached dependency version does not match,
+ # otherwise func() would have returned None instead.
+ raise DependencyException(f'Dependency {self.display_name!r} is required but not found.')
+ elif dep:
+ # Same as above, but the dependency is not required.
+ return dep
+ return self._notfound_dependency()
diff --git a/meson/mesonbuild/interpreter/interpreter.py b/meson/mesonbuild/interpreter/interpreter.py
new file mode 100644
index 000000000..4a60ff485
--- /dev/null
+++ b/meson/mesonbuild/interpreter/interpreter.py
@@ -0,0 +1,2794 @@
+# Copyright 2012-2021 The Meson development team
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mparser
+from .. import environment
+from .. import coredata
+from .. import dependencies
+from .. import mlog
+from .. import build
+from .. import optinterpreter
+from .. import compilers
+from ..wrap import wrap, WrapMode
+from .. import mesonlib
+from ..mesonlib import HoldableObject, FileMode, MachineChoice, OptionKey, listify, extract_as_list, has_path_sep
+from ..programs import ExternalProgram, NonExistingExternalProgram
+from ..dependencies import Dependency
+from ..depfile import DepFile
+from ..interpreterbase import ContainerTypeInfo, InterpreterBase, KwargInfo, typed_kwargs, typed_pos_args
+from ..interpreterbase import noPosargs, noKwargs, stringArgs, permittedKwargs, noArgsFlattening, noSecondLevelHolderResolving, permissive_unholder_return
+from ..interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest
+from ..interpreterbase import Disabler, disablerIfNotFound
+from ..interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs, FeatureDeprecatedKwargs
+from ..interpreterbase import ObjectHolder, RangeHolder
+from ..interpreterbase import TYPE_nkwargs, TYPE_nvar, TYPE_var
+from ..modules import ExtensionModule, ModuleObject, MutableModuleObject, NewExtensionModule, NotFoundExtensionModule
+from ..cmake import CMakeInterpreter
+from ..backend.backends import Backend, ExecutableSerialisation
+
+from . import interpreterobjects as OBJ
+from . import compiler as compilerOBJ
+from .mesonmain import MesonMain
+from .dependencyfallbacks import DependencyFallbacksHolder
+from .interpreterobjects import (
+ SubprojectHolder,
+ EnvironmentVariablesObject,
+ ConfigurationDataObject,
+ Test,
+ RunProcess,
+ extract_required_kwarg,
+ extract_search_dirs,
+ NullSubprojectInterpreter,
+)
+
+from pathlib import Path
+import os
+import shutil
+import uuid
+import re
+import stat
+import collections
+import typing as T
+import textwrap
+import importlib
+
+if T.TYPE_CHECKING:
+ from . import kwargs
+
+ # Input source types passed to Targets
+ SourceInputs = T.Union[mesonlib.File, build.GeneratedList, build.BuildTarget, build.BothLibraries,
+ build.CustomTargetIndex, build.CustomTarget, str]
+ # Input source types passed to the build.Target classes
+ SourceOutputs = T.Union[mesonlib.File, build.GeneratedList,
+ build.BuildTarget, build.CustomTargetIndex, build.CustomTarget,
+ build.GeneratedList]
+
+
+def _language_validator(l: T.List[str]) -> T.Optional[str]:
+ """Validate language keyword argument.
+
+ Particularly for functions like `add_compiler()`, and `add_*_args()`
+ """
+ diff = {a.lower() for a in l}.difference(compilers.all_languages)
+ if diff:
+ return f'unknown languages: {", ".join(diff)}'
+ return None
+
+
+def _install_mode_validator(mode: T.List[T.Union[str, bool, int]]) -> T.Optional[str]:
+ """Validate the `install_mode` keyword argument.
+
+ This is a rather odd thing: it is either a scalar, or an array of up to
+ 3 values in the form:
+ [(str | False), (str | int | False) = False, (str | int | False) = False]
+ where the second and third elements are optional and default to False.
+ """
+ if not mode:
+ return None
+ if True in mode:
+ return 'can only be a string or false, not true'
+ if len(mode) > 3:
+ return 'may have at most 3 elements'
+
+ perms = mode[0]
+ if not isinstance(perms, (str, bool)):
+ return 'permissions part must be a string or false'
+
+ if isinstance(perms, str):
+ if len(perms) != 9:
+ return ('permissions string must be exactly 9 characters in the '
+ f'form rwxr-xr-x, got {len(perms)}')
+ for i in [0, 3, 6]:
+ if perms[i] not in {'-', 'r'}:
+ return f'bit {i} must be "-" or "r", not {perms[i]}'
+ for i in [1, 4, 7]:
+ if perms[i] not in {'-', 'w'}:
+ return f'bit {i} must be "-" or "w", not {perms[i]}'
+ for i in [2, 5]:
+ if perms[i] not in {'-', 'x', 's', 'S'}:
+ return f'bit {i} must be "-", "s", "S", or "x", not {perms[i]}'
+ if perms[8] not in {'-', 'x', 't', 'T'}:
+ return f'bit 8 must be "-", "t", "T", or "x", not {perms[8]}'
+
+ if len(mode) >= 2 and not isinstance(mode[1], (int, str, bool)):
+ return 'second component must be a string, number, or False if provided'
+ if len(mode) >= 3 and not isinstance(mode[2], (int, str, bool)):
+ return 'third component must be a string, number, or False if provided'
+
+ return None
+
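+# Values that would pass the validator above (illustrative meson.build kwargs):
+#   install_mode: 'rw-r--r--'
+#   install_mode: ['rwxr-xr-x', 'root', 0]
+#   install_mode: [false, false, 0]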
+
+def _install_mode_convertor(mode: T.Optional[T.List[T.Union[str, bool, int]]]) -> FileMode:
+ """Convert the DSL form of the `install_mode` keyword arugment to `FileMode`
+
+ This is not required, and if not required returns None
+
+ TODO: It's not clear to me why this needs to be None and not just return an
+ emtpy FileMode.
+ """
+ # this has already been validated by the validator
+ return FileMode(*[m if isinstance(m, str) else None for m in mode])
+
+
+_NATIVE_KW = KwargInfo(
+ 'native', bool,
+ default=False,
+ convertor=lambda n: MachineChoice.BUILD if n else MachineChoice.HOST)
+
+_LANGUAGE_KW = KwargInfo(
+ 'language', ContainerTypeInfo(list, str, allow_empty=False),
+ listify=True,
+ required=True,
+ validator=_language_validator,
+ convertor=lambda x: [i.lower() for i in x])
+
+_INSTALL_MODE_KW = KwargInfo(
+ 'install_mode',
+ ContainerTypeInfo(list, (str, bool, int)),
+ listify=True,
+ default=[],
+ validator=_install_mode_validator,
+ convertor=_install_mode_convertor,
+)
+
+_REQUIRED_KW = KwargInfo(
+ 'required',
+ (bool, coredata.UserFeatureOption),
+ default=True,
+ # TODO: extract_required_kwarg could be converted to a convertor
+)
+
+
+def stringifyUserArguments(args, quote=False):
+ if isinstance(args, list):
+ return '[%s]' % ', '.join([stringifyUserArguments(x, True) for x in args])
+ elif isinstance(args, dict):
+ return '{%s}' % ', '.join(['{} : {}'.format(stringifyUserArguments(k, True), stringifyUserArguments(v, True)) for k, v in args.items()])
+ elif isinstance(args, int):
+ return str(args)
+ elif isinstance(args, str):
+ return f"'{args}'" if quote else args
+ raise InvalidArguments('Function accepts only strings, integers, lists, dictionaries and lists thereof.')
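+# For example (illustrative): stringifyUserArguments(['a', 1, {'k': 'v'}])
+# returns "['a', 1, {'k' : 'v'}]".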
+
+class Summary:
+ def __init__(self, project_name, project_version):
+ self.project_name = project_name
+ self.project_version = project_version
+ self.sections = collections.defaultdict(dict)
+ self.max_key_len = 0
+
+ def add_section(self, section, values, kwargs, subproject):
+ bool_yn = kwargs.get('bool_yn', False)
+ if not isinstance(bool_yn, bool):
+ raise InterpreterException('bool_yn keyword argument must be boolean')
+ list_sep = kwargs.get('list_sep')
+ if list_sep is not None and not isinstance(list_sep, str):
+ raise InterpreterException('list_sep keyword argument must be string')
+ for k, v in values.items():
+ if k in self.sections[section]:
+ raise InterpreterException(f'Summary section {section!r} already has key {k!r}')
+ formatted_values = []
+ for i in listify(v):
+ if isinstance(i, bool) and bool_yn:
+ formatted_values.append(mlog.green('YES') if i else mlog.red('NO'))
+ elif isinstance(i, (str, int, bool)):
+ formatted_values.append(str(i))
+ elif isinstance(i, (ExternalProgram, Dependency)):
+ FeatureNew.single_use('dependency or external program in summary', '0.57.0', subproject)
+ formatted_values.append(i.summary_value())
+ elif isinstance(i, coredata.UserOption):
+ FeatureNew.single_use('feature option in summary', '0.58.0', subproject)
+ formatted_values.append(i.printable_value())
+ else:
+ m = 'Summary value in section {!r}, key {!r}, must be string, integer, boolean, dependency, feature option or external program'
+ raise InterpreterException(m.format(section, k))
+ self.sections[section][k] = (formatted_values, list_sep)
+ self.max_key_len = max(self.max_key_len, len(k))
+
+ def dump(self):
+ mlog.log(self.project_name, mlog.normal_cyan(self.project_version))
+ for section, values in self.sections.items():
+ mlog.log('') # newline
+ if section:
+ mlog.log(' ', mlog.bold(section))
+ for k, v in values.items():
+ v, list_sep = v
+ padding = self.max_key_len - len(k)
+ end = ' ' if v else ''
+ mlog.log(' ' * 3, k + ' ' * padding + ':', end=end)
+ indent = self.max_key_len + 6
+ self.dump_value(v, list_sep, indent)
+ mlog.log('') # newline
+
+ def dump_value(self, arr, list_sep, indent):
+ lines_sep = '\n' + ' ' * indent
+ if list_sep is None:
+ mlog.log(*arr, sep=lines_sep)
+ return
+ max_len = shutil.get_terminal_size().columns
+ line = []
+ line_len = indent
+ lines_sep = list_sep.rstrip() + lines_sep
+ for v in arr:
+ v_len = len(v) + len(list_sep)
+ if line and line_len + v_len > max_len:
+ mlog.log(*line, sep=list_sep, end=lines_sep)
+ line_len = indent
+ line = []
+ line.append(v)
+ line_len += v_len
+ mlog.log(*line, sep=list_sep)
+
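+# Roughly how Summary surfaces in meson.build (illustrative):
+#   summary({'prefix': get_option('prefix')}, section: 'Directories')
+#   summary({'docs': get_option('docs')}, section: 'Features', bool_yn: true)
+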
+known_library_kwargs = (
+ build.known_shlib_kwargs |
+ build.known_stlib_kwargs
+)
+
+known_build_target_kwargs = (
+ known_library_kwargs |
+ build.known_exe_kwargs |
+ build.known_jar_kwargs |
+ {'target_type'}
+)
+
+TEST_KWARGS: T.List[KwargInfo] = [
+ KwargInfo('args', ContainerTypeInfo(list, (str, mesonlib.File, build.Target)),
+ listify=True, default=[]),
+ KwargInfo('should_fail', bool, default=False),
+ KwargInfo('timeout', int, default=30),
+ KwargInfo('workdir', str, default=None,
+ validator=lambda x: 'must be an absolute path' if not os.path.isabs(x) else None),
+ KwargInfo('protocol', str,
+ default='exitcode',
+ validator=lambda x: 'value must be one of "exitcode", "tap", "gtest", "rust"' if x not in {'exitcode', 'tap', 'gtest', 'rust'} else None,
+ since_values={'gtest': '0.55.0', 'rust': '0.57.0'}),
+ KwargInfo('depends', ContainerTypeInfo(list, (build.CustomTarget, build.BuildTarget)),
+ listify=True, default=[], since='0.46.0'),
+ KwargInfo('priority', int, default=0, since='0.52.0'),
+ # TODO: env needs reworks of the way the environment variable holder itself works probably
+ KwargInfo('env', (EnvironmentVariablesObject, list, dict, str)),
+ KwargInfo('suite', ContainerTypeInfo(list, str), listify=True, default=['']), # yes, a list of empty string
+]
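+
+# Illustrative test() call exercising these kwargs (assumes 'exe' is an
+# executable() target):
+#   test('unit', exe, args: ['--fast'], timeout: 60, protocol: 'tap',
+#        suite: ['quick'], priority: 10, should_fail: false)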
+
+permitted_dependency_kwargs = {
+ 'allow_fallback',
+ 'cmake_args',
+ 'cmake_module_path',
+ 'cmake_package_version',
+ 'components',
+ 'default_options',
+ 'fallback',
+ 'include_type',
+ 'language',
+ 'main',
+ 'method',
+ 'modules',
+ 'native',
+ 'not_found_message',
+ 'optional_modules',
+ 'private_headers',
+ 'required',
+ 'static',
+ 'version',
+}
+
+class Interpreter(InterpreterBase, HoldableObject):
+
+ def __init__(
+ self,
+ _build: build.Build,
+ backend: T.Optional[Backend] = None,
+ subproject: str = '',
+ subdir: str = '',
+ subproject_dir: str = 'subprojects',
+ default_project_options: T.Optional[T.Dict[str, str]] = None,
+ mock: bool = False,
+ ast: T.Optional[mparser.CodeBlockNode] = None,
+ is_translated: bool = False,
+ ) -> None:
+ super().__init__(_build.environment.get_source_dir(), subdir, subproject)
+ self.an_unpicklable_object = mesonlib.an_unpicklable_object
+ self.build = _build
+ self.environment = self.build.environment
+ self.coredata = self.environment.get_coredata()
+ self.backend = backend
+ self.summary = {}
+ self.modules = {}
+ # Subproject directory is usually the name of the subproject, but can
+ # be different for dependencies provided by wrap files.
+ self.subproject_directory_name = subdir.split(os.path.sep)[-1]
+ self.subproject_dir = subproject_dir
+ self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
+ if not mock and ast is None:
+ self.load_root_meson_file()
+ self.sanity_check_ast()
+ elif ast is not None:
+ self.ast = ast
+ self.sanity_check_ast()
+ self.builtin.update({'meson': MesonMain(self.build, self)})
+ self.generators: T.List[build.Generator] = []
+ self.processed_buildfiles = set() # type: T.Set[str]
+ self.project_args_frozen = False
+ self.global_args_frozen = False # implies self.project_args_frozen
+ self.subprojects: T.Dict[str, SubprojectHolder] = {}
+ self.subproject_stack = []
+ self.configure_file_outputs = {}
+ # Passed from the outside, only used in subprojects.
+ if default_project_options:
+ self.default_project_options = default_project_options.copy()
+ else:
+ self.default_project_options = {}
+ self.project_default_options = {}
+ self.build_func_dict()
+ self.build_holder_map()
+
+ # build_def_files needs to be defined before parse_project is called
+ #
+ # For non-meson subprojects, we'll be using the ast. Even if the build
+ # file does exist we don't want to add a dependency on it; it's
+ # autogenerated from the actual build files and is just for reference.
+ self.build_def_files = []
+ build_filename = os.path.join(self.subdir, environment.build_filename)
+ if not is_translated:
+ self.build_def_files.append(build_filename)
+ if not mock:
+ self.parse_project()
+ self._redetect_machines()
+
+ def _redetect_machines(self):
+ # Re-initialize machine descriptions. We can do a better job now because we
+ # have the compilers needed to gain more knowledge, so wipe out old
+ # inference and start over.
+ machines = self.build.environment.machines.miss_defaulting()
+ machines.build = environment.detect_machine_info(self.coredata.compilers.build)
+ self.build.environment.machines = machines.default_missing()
+ assert self.build.environment.machines.build.cpu is not None
+ assert self.build.environment.machines.host.cpu is not None
+ assert self.build.environment.machines.target.cpu is not None
+
+ self.builtin['build_machine'] = \
+ OBJ.MachineHolder(self.build.environment.machines.build, self)
+ self.builtin['host_machine'] = \
+ OBJ.MachineHolder(self.build.environment.machines.host, self)
+ self.builtin['target_machine'] = \
+ OBJ.MachineHolder(self.build.environment.machines.target, self)
+
+ # TODO: Why is this in interpreter.py and not CoreData or Environment?
+ def get_non_matching_default_options(self) -> T.Iterator[T.Tuple[str, str, coredata.UserOption]]:
+ for def_opt_name, def_opt_value in self.project_default_options.items():
+ cur_opt_value = self.coredata.options.get(def_opt_name)
+ try:
+ if cur_opt_value is not None and cur_opt_value.validate_value(def_opt_value) != cur_opt_value.value:
+ yield (str(def_opt_name), def_opt_value, cur_opt_value)
+ except mesonlib.MesonException:
+ # Since the default value does not validate, it cannot be in use.
+ # Report the user-specified value as non-matching.
+ yield (str(def_opt_name), def_opt_value, cur_opt_value)
+
+ def build_func_dict(self):
+ self.funcs.update({'add_global_arguments': self.func_add_global_arguments,
+ 'add_project_arguments': self.func_add_project_arguments,
+ 'add_global_link_arguments': self.func_add_global_link_arguments,
+ 'add_project_link_arguments': self.func_add_project_link_arguments,
+ 'add_test_setup': self.func_add_test_setup,
+ 'add_languages': self.func_add_languages,
+ 'alias_target': self.func_alias_target,
+ 'assert': self.func_assert,
+ 'benchmark': self.func_benchmark,
+ 'build_target': self.func_build_target,
+ 'configuration_data': self.func_configuration_data,
+ 'configure_file': self.func_configure_file,
+ 'custom_target': self.func_custom_target,
+ 'declare_dependency': self.func_declare_dependency,
+ 'dependency': self.func_dependency,
+ 'disabler': self.func_disabler,
+ 'environment': self.func_environment,
+ 'error': self.func_error,
+ 'executable': self.func_executable,
+ 'generator': self.func_generator,
+ 'gettext': self.func_gettext,
+ 'get_option': self.func_get_option,
+ 'get_variable': self.func_get_variable,
+ 'files': self.func_files,
+ 'find_library': self.func_find_library,
+ 'find_program': self.func_find_program,
+ 'include_directories': self.func_include_directories,
+ 'import': self.func_import,
+ 'install_data': self.func_install_data,
+ 'install_headers': self.func_install_headers,
+ 'install_man': self.func_install_man,
+ 'install_subdir': self.func_install_subdir,
+ 'is_disabler': self.func_is_disabler,
+ 'is_variable': self.func_is_variable,
+ 'jar': self.func_jar,
+ 'join_paths': self.func_join_paths,
+ 'library': self.func_library,
+ 'message': self.func_message,
+ 'warning': self.func_warning,
+ 'option': self.func_option,
+ 'project': self.func_project,
+ 'run_target': self.func_run_target,
+ 'run_command': self.func_run_command,
+ 'set_variable': self.func_set_variable,
+ 'subdir': self.func_subdir,
+ 'subdir_done': self.func_subdir_done,
+ 'subproject': self.func_subproject,
+ 'summary': self.func_summary,
+ 'shared_library': self.func_shared_lib,
+ 'shared_module': self.func_shared_module,
+ 'static_library': self.func_static_lib,
+ 'both_libraries': self.func_both_lib,
+ 'test': self.func_test,
+ 'vcs_tag': self.func_vcs_tag,
+ 'range': self.func_range,
+ })
+ if 'MESON_UNIT_TEST' in os.environ:
+ self.funcs.update({'exception': self.func_exception})
+
+ def build_holder_map(self) -> None:
+ '''
+ Build a mapping of `HoldableObject` types to their corresponding
+ `ObjectHolder`s. This mapping is used in `InterpreterBase` to automatically
+ holderify all returned values from methods and functions.
+ '''
+ self.holder_map.update({
+ mesonlib.File: OBJ.FileHolder,
+ build.SharedLibrary: OBJ.SharedLibraryHolder,
+ build.StaticLibrary: OBJ.StaticLibraryHolder,
+ build.BothLibraries: OBJ.BothLibrariesHolder,
+ build.SharedModule: OBJ.SharedModuleHolder,
+ build.Executable: OBJ.ExecutableHolder,
+ build.Jar: OBJ.JarHolder,
+ build.CustomTarget: OBJ.CustomTargetHolder,
+ build.CustomTargetIndex: OBJ.CustomTargetIndexHolder,
+ build.Generator: OBJ.GeneratorHolder,
+ build.GeneratedList: OBJ.GeneratedListHolder,
+ build.ExtractedObjects: OBJ.GeneratedObjectsHolder,
+ build.RunTarget: OBJ.RunTargetHolder,
+ build.AliasTarget: OBJ.AliasTargetHolder,
+ build.Headers: OBJ.HeadersHolder,
+ build.Man: OBJ.ManHolder,
+ build.Data: OBJ.DataHolder,
+ build.InstallDir: OBJ.InstallDirHolder,
+ build.IncludeDirs: OBJ.IncludeDirsHolder,
+ compilers.RunResult: compilerOBJ.TryRunResultHolder,
+ dependencies.ExternalLibrary: OBJ.ExternalLibraryHolder,
+ coredata.UserFeatureOption: OBJ.FeatureOptionHolder,
+ })
+
+ '''
+ Build a mapping of `HoldableObject` base classes to their
+ corresponding `ObjectHolder`s. The difference from `self.holder_map`
+ is that the keys here define an upper bound instead of requiring an
+ exact match.
+
+ The mappings defined here are only used when there was no direct hit
+ found in `self.holder_map`.
+ '''
+ self.bound_holder_map.update({
+ dependencies.Dependency: OBJ.DependencyHolder,
+ ExternalProgram: OBJ.ExternalProgramHolder,
+ compilers.Compiler: compilerOBJ.CompilerHolder,
+ ModuleObject: OBJ.ModuleObjectHolder,
+ MutableModuleObject: OBJ.MutableModuleObjectHolder,
+ })
+
+ def append_holder_map(self, held_type: T.Type[mesonlib.HoldableObject], holder_type: T.Type[ObjectHolder]) -> None:
+ '''
+ Adds one additional mapping to the `holder_map`.
+
+ The intended use for this function is in the `initialize` method of
+ modules to register custom object holders.
+ '''
+ self.holder_map.update({
+ held_type: holder_type
+ })
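+
+ # Hedged sketch (illustrative, not part of this class): a hypothetical
+ # module's initialize() could register a custom holder roughly like
+ # this, where MyModule, MyObject and MyObjectHolder are invented names:
+ #
+ #   def initialize(interp: 'Interpreter') -> MyModule:
+ #       interp.append_holder_map(MyObject, MyObjectHolder)
+ #       return MyModule()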
+
+ def process_new_values(self, invalues: T.List[TYPE_var]) -> None:
+ invalues = listify(invalues)
+ for v in invalues:
+ if isinstance(v, ObjectHolder):
+ raise InterpreterException('Modules must not return ObjectHolders')
+ if isinstance(v, (build.BuildTarget, build.CustomTarget, build.RunTarget)):
+ self.add_target(v.name, v)
+ elif isinstance(v, list):
+ self.process_new_values(v)
+ elif isinstance(v, ExecutableSerialisation):
+ v.subproject = self.subproject
+ self.build.install_scripts.append(v)
+ elif isinstance(v, build.Data):
+ self.build.data.append(v)
+ elif isinstance(v, dependencies.InternalDependency):
+ # FIXME: This is special cased and not ideal:
+ # The first source is our new VapiTarget, the rest are deps
+ self.process_new_values(v.sources[0])
+ elif isinstance(v, build.InstallDir):
+ self.build.install_dirs.append(v)
+ elif isinstance(v, Test):
+ self.build.tests.append(v)
+ elif isinstance(v, (int, str, bool, Disabler, ObjectHolder, build.GeneratedList,
+ ExternalProgram)):
+ pass
+ else:
+ raise InterpreterException('Module returned a value of unknown type.')
+
+ def get_build_def_files(self) -> T.List[str]:
+ return self.build_def_files
+
+ def add_build_def_file(self, f: mesonlib.FileOrString) -> None:
+ # Use a relative path for files within the source directory, and an
+ # absolute path for system files. Skip files within the build directory.
+ # Also skip non-regular files (e.g. /dev/stdout). Normalize the path to
+ # avoid duplicates; this matters especially for converting '/' to '\' on Windows.
+ if isinstance(f, mesonlib.File):
+ if f.is_built:
+ return
+ f = os.path.normpath(f.relative_name())
+ elif os.path.isfile(f) and not f.startswith('/dev'):
+ srcdir = Path(self.environment.get_source_dir())
+ builddir = Path(self.environment.get_build_dir())
+ try:
+ f = Path(f).resolve()
+ except OSError:
+ f = Path(f)
+ s = f.stat()
+ if (hasattr(s, 'st_file_attributes') and
+ s.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT != 0 and
+ s.st_reparse_tag == stat.IO_REPARSE_TAG_APPEXECLINK):
+ # This is a Windows Store link which we can't
+ # resolve, so just do our best otherwise.
+ f = f.parent.resolve() / f.name
+ else:
+ raise
+ if builddir in f.parents:
+ return
+ if srcdir in f.parents:
+ f = f.relative_to(srcdir)
+ f = str(f)
+ else:
+ return
+ if f not in self.build_def_files:
+ self.build_def_files.append(f)
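+
+ # Illustrative behaviour of the normalization above, assuming
+ # srcdir=/src and builddir=/src/build (paths invented):
+ #   /src/sub/meson.build -> stored as 'sub/meson.build' (source-relative)
+ #   /usr/share/foo.cmake -> stored as an absolute path (system file)
+ #   /src/build/config.h  -> skipped (inside the build directory)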
+
+ def get_variables(self):
+ return self.variables
+
+ def check_stdlibs(self):
+ machine_choices = [MachineChoice.HOST]
+ if self.coredata.is_cross_build():
+ machine_choices.append(MachineChoice.BUILD)
+ for for_machine in machine_choices:
+ props = self.build.environment.properties[for_machine]
+ for l in self.coredata.compilers[for_machine].keys():
+ try:
+ di = mesonlib.stringlistify(props.get_stdlib(l))
+ except KeyError:
+ continue
+ if len(di) == 1:
+ FeatureNew.single_use('stdlib without variable name', '0.56.0', self.subproject)
+ kwargs = {'native': for_machine is MachineChoice.BUILD}
+ name = l + '_stdlib'
+ df = DependencyFallbacksHolder(self, [name])
+ df.set_fallback(di)
+ dep = df.lookup(kwargs, force_fallback=True)
+ self.build.stdlibs[for_machine][l] = dep
+
+ def _import_module(self, modname: str, required: bool) -> T.Union[ExtensionModule, NewExtensionModule, NotFoundExtensionModule]:
+ if modname in self.modules:
+ return self.modules[modname]
+ try:
+ module = importlib.import_module('mesonbuild.modules.' + modname)
+ except ImportError:
+ if required:
+ raise InvalidArguments(f'Module "{modname}" does not exist')
+ ext_module = NotFoundExtensionModule()
+ else:
+ ext_module = module.initialize(self)
+ assert isinstance(ext_module, (ExtensionModule, NewExtensionModule))
+ self.modules[modname] = ext_module
+ return ext_module
+
+ @typed_pos_args('import', str)
+ @typed_kwargs(
+ 'import',
+ _REQUIRED_KW.evolve(since='0.59.0'),
+ KwargInfo('disabler', bool, default=False, since='0.59.0'),
+ )
+ @disablerIfNotFound
+ def func_import(self, node: mparser.BaseNode, args: T.Tuple[str],
+ kwargs: 'kwargs.FuncImportModule') -> T.Union[ExtensionModule, NewExtensionModule, NotFoundExtensionModule]:
+ modname = args[0]
+ disabled, required, _ = extract_required_kwarg(kwargs, self.subproject)
+ if disabled:
+ return NotFoundExtensionModule()
+
+ if modname.startswith('unstable-'):
+ plainname = modname.split('-', 1)[1]
+ try:
+ # check if stable module exists
+ mod = self._import_module(plainname, required)
+ # XXX: this is actually not helpful, since it doesn't do a version check
+ mlog.warning(f'Module {modname} is now stable, please use the {plainname} module instead.')
+ return mod
+ except InvalidArguments:
+ mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname, location=node)
+ modname = 'unstable_' + plainname
+ return self._import_module(modname, required)
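+
+ # Illustrative meson.build calls dispatched to func_import() above
+ # (real module names used as examples; kwarg versions per the
+ # decorators above):
+ #
+ #   fs = import('fs')
+ #   kv = import('keyval', required: false, disabler: true) # 0.59.0 kwargs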
+
+ @stringArgs
+ @noKwargs
+ def func_files(self, node, args, kwargs):
+ return [mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, fname) for fname in args]
+
+ # Used by declare_dependency() and pkgconfig.generate()
+ def extract_variables(self, kwargs, argname='variables', list_new=False, dict_new=False):
+ variables = kwargs.get(argname, {})
+ if isinstance(variables, dict):
+ if dict_new and variables:
+ FeatureNew.single_use('variables as dictionary', '0.56.0', self.subproject)
+ else:
+ varlist = mesonlib.stringlistify(variables)
+ if list_new:
+ FeatureNew.single_use('variables as list of strings', '0.56.0', self.subproject)
+ variables = collections.OrderedDict()
+ for v in varlist:
+ try:
+ (key, value) = v.split('=', 1)
+ except ValueError:
+ raise InterpreterException(f'Variable {v!r} must have a value separated by equals sign.')
+ variables[key.strip()] = value.strip()
+ for k, v in variables.items():
+ if not k or not v:
+ raise InterpreterException('Empty variable name or value')
+ if any(c.isspace() for c in k):
+ raise InterpreterException(f'Invalid whitespace in variable name "{k}"')
+ if not isinstance(v, str):
+ raise InterpreterException('Variable values must be strings.')
+ return variables
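+
+ # Both accepted input forms, as a hedged sketch ('datadir' is an
+ # invented variable name):
+ #
+ #   variables: {'datadir': 'share'}  # dictionary form
+ #   variables: ['datadir=share']     # list of 'key=value' strings
+ #
+ # Whichever form a given call site did not originally accept is
+ # flagged as new in 0.56.0 via the list_new/dict_new parameters.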
+
+ @FeatureNewKwargs('declare_dependency', '0.46.0', ['link_whole'])
+ @FeatureNewKwargs('declare_dependency', '0.54.0', ['variables'])
+ @permittedKwargs({'include_directories', 'link_with', 'sources', 'dependencies',
+ 'compile_args', 'link_args', 'link_whole', 'version',
+ 'variables'})
+ @noPosargs
+ def func_declare_dependency(self, node, args, kwargs):
+ version = kwargs.get('version', self.project_version)
+ if not isinstance(version, str):
+ raise InterpreterException('Version must be a string.')
+ incs = self.extract_incdirs(kwargs)
+ libs = extract_as_list(kwargs, 'link_with')
+ libs_whole = extract_as_list(kwargs, 'link_whole')
+ sources = extract_as_list(kwargs, 'sources')
+ sources = listify(self.source_strings_to_files(sources))
+ deps = extract_as_list(kwargs, 'dependencies')
+ compile_args = mesonlib.stringlistify(kwargs.get('compile_args', []))
+ link_args = mesonlib.stringlistify(kwargs.get('link_args', []))
+ variables = self.extract_variables(kwargs, list_new=True)
+ final_deps = []
+ for d in deps:
+ if not isinstance(d, (dependencies.Dependency, dependencies.ExternalLibrary, dependencies.InternalDependency)):
+ raise InterpreterException('Dependencies must be external deps')
+ final_deps.append(d)
+ for l in libs:
+ if isinstance(l, dependencies.Dependency):
+ raise InterpreterException('''Entries in "link_with" may only be self-built targets,
+external dependencies (including libraries) must go to "dependencies".''')
+ dep = dependencies.InternalDependency(version, incs, compile_args,
+ link_args, libs, libs_whole, sources, final_deps,
+ variables)
+ return dep
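+
+ # Minimal illustrative usage (the target and directory names are
+ # hypothetical):
+ #
+ #   mylib_dep = declare_dependency(
+ #     include_directories: include_directories('include'),
+ #     link_with: mylib,
+ #     version: '1.0')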
+
+ @noKwargs
+ def func_assert(self, node, args, kwargs):
+ if len(args) == 1:
+ FeatureNew.single_use('assert function without message argument', '0.53.0', self.subproject)
+ value = args[0]
+ message = None
+ elif len(args) == 2:
+ value, message = args
+ if not isinstance(message, str):
+ raise InterpreterException('Assert message not a string.')
+ else:
+ raise InterpreterException('Assert takes between one and two arguments')
+ if not isinstance(value, bool):
+ raise InterpreterException('Assert value not bool.')
+ if not value:
+ if message is None:
+ from ..ast import AstPrinter
+ printer = AstPrinter()
+ node.args.arguments[0].accept(printer)
+ message = printer.result
+ raise InterpreterException('Assert failed: ' + message)
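+
+ # Illustrative calls handled above ('foo' is an invented variable):
+ #
+ #   assert(foo == 1, 'foo is not 1')  # explicit message
+ #   assert(foo == 1)                  # 0.53.0: the message is
+ #                                     # reconstructed from the AST
+ #                                     # via AstPrinter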
+
+ def validate_arguments(self, args, argcount, arg_types):
+ if argcount is not None:
+ if argcount != len(args):
+ raise InvalidArguments('Expected %d arguments, got %d.' %
+ (argcount, len(args)))
+ for actual, wanted in zip(args, arg_types):
+ if wanted is not None:
+ if not isinstance(actual, wanted):
+ raise InvalidArguments('Incorrect argument type.')
+
+ @FeatureNewKwargs('run_command', '0.50.0', ['env'])
+ @FeatureNewKwargs('run_command', '0.47.0', ['check', 'capture'])
+ @permittedKwargs({'check', 'capture', 'env'})
+ def func_run_command(self, node, args, kwargs):
+ return self.run_command_impl(node, args, kwargs)
+
+ def run_command_impl(self,
+ node: mparser.BaseNode,
+ args: T.Sequence[TYPE_nvar],
+ kwargs: TYPE_nkwargs,
+ in_builddir: bool = False) -> RunProcess:
+ if len(args) < 1:
+ raise InterpreterException('Not enough arguments')
+ cmd, *cargs = args
+ capture = kwargs.get('capture', True)
+ srcdir = self.environment.get_source_dir()
+ builddir = self.environment.get_build_dir()
+
+ check = kwargs.get('check', False)
+ if not isinstance(check, bool):
+ raise InterpreterException('Check must be boolean.')
+
+ env = self.unpack_env_kwarg(kwargs)
+
+ m = 'must be a string, or the output of find_program(), files() '\
+ 'or configure_file(), or a compiler object; not {!r}'
+ expanded_args = []
+ if isinstance(cmd, build.Executable):
+ progname = node.args.arguments[0].value
+ msg = 'Program {!r} was overridden with the compiled executable {!r}'\
+ ' and therefore cannot be used during configuration'
+ raise InterpreterException(msg.format(progname, cmd.description()))
+ if isinstance(cmd, ExternalProgram):
+ if not cmd.found():
+ raise InterpreterException(f'command {cmd.get_name()!r} not found or not executable')
+ elif isinstance(cmd, compilers.Compiler):
+ exelist = cmd.get_exelist()
+ cmd = exelist[0]
+ prog = ExternalProgram(cmd, silent=True)
+ if not prog.found():
+ raise InterpreterException(f'Program {cmd!r} not found or not executable')
+ cmd = prog
+ expanded_args = exelist[1:]
+ else:
+ if isinstance(cmd, mesonlib.File):
+ cmd = cmd.absolute_path(srcdir, builddir)
+ elif not isinstance(cmd, str):
+ raise InterpreterException('First argument ' + m.format(cmd))
+ # Prefer scripts in the current source directory
+ search_dir = os.path.join(srcdir, self.subdir)
+ prog = ExternalProgram(cmd, silent=True, search_dir=search_dir)
+ if not prog.found():
+ raise InterpreterException(f'Program or command {cmd!r} not found or not executable')
+ cmd = prog
+ for a in listify(cargs):
+ if isinstance(a, str):
+ expanded_args.append(a)
+ elif isinstance(a, mesonlib.File):
+ expanded_args.append(a.absolute_path(srcdir, builddir))
+ elif isinstance(a, ExternalProgram):
+ expanded_args.append(a.get_path())
+ else:
+ raise InterpreterException('Arguments ' + m.format(a))
+ # If any file that was used as an argument to the command
+ # changes, we must re-run the configuration step.
+ self.add_build_def_file(cmd.get_path())
+ for a in expanded_args:
+ if not os.path.isabs(a):
+ a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a)
+ self.add_build_def_file(a)
+ return RunProcess(cmd, expanded_args, env, srcdir, builddir, self.subdir,
+ self.environment.get_build_command() + ['introspect'],
+ in_builddir=in_builddir, check=check, capture=capture)
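+
+ # Illustrative meson.build usage of the implementation above (the
+ # script path is hypothetical):
+ #
+ #   r = run_command('scripts/gen.py', '--list', check: true)
+ #   out = r.stdout().strip()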
+
+ @stringArgs
+ def func_gettext(self, nodes, args, kwargs):
+ raise InterpreterException('The gettext() function has been moved to the i18n module. Import it and use i18n.gettext() instead.')
+
+ def func_option(self, nodes, args, kwargs):
+ raise InterpreterException('Tried to call option() in build description file. All options must be in the option file.')
+
+ @FeatureNewKwargs('subproject', '0.38.0', ['default_options'])
+ @permittedKwargs({'version', 'default_options', 'required'})
+ @stringArgs
+ def func_subproject(self, nodes, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('Subproject takes exactly one argument')
+ return self.do_subproject(args[0], 'meson', kwargs)
+
+ def disabled_subproject(self, subp_name, disabled_feature=None, exception=None):
+ sub = SubprojectHolder(NullSubprojectInterpreter(), os.path.join(self.subproject_dir, subp_name),
+ disabled_feature=disabled_feature, exception=exception)
+ self.subprojects[subp_name] = sub
+ self.coredata.initialized_subprojects.add(subp_name)
+ return sub
+
+ def do_subproject(self, subp_name: str, method: str, kwargs):
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+ if disabled:
+ mlog.log('Subproject', mlog.bold(subp_name), ':', 'skipped: feature', mlog.bold(feature), 'disabled')
+ return self.disabled_subproject(subp_name, disabled_feature=feature)
+
+ default_options = mesonlib.stringlistify(kwargs.get('default_options', []))
+ default_options = coredata.create_options_dict(default_options, subp_name)
+
+ if subp_name == '':
+ raise InterpreterException('Subproject name must not be empty.')
+ if subp_name[0] == '.':
+ raise InterpreterException('Subproject name must not start with a period.')
+ if '..' in subp_name:
+ raise InterpreterException('Subproject name must not contain a ".." path segment.')
+ if os.path.isabs(subp_name):
+ raise InterpreterException('Subproject name must not be an absolute path.')
+ if has_path_sep(subp_name):
+ mlog.warning('Subproject name has a path separator. This may cause unexpected behaviour.',
+ location=self.current_node)
+ if subp_name in self.subproject_stack:
+ fullstack = self.subproject_stack + [subp_name]
+ incpath = ' => '.join(fullstack)
+ raise InvalidCode('Recursive include of subprojects: %s.' % incpath)
+ if subp_name in self.subprojects:
+ subproject = self.subprojects[subp_name]
+ if required and not subproject.found():
+ raise InterpreterException('Subproject "%s" required but not found.' % (subproject.subdir))
+ return subproject
+
+ r = self.environment.wrap_resolver
+ try:
+ subdir = r.resolve(subp_name, method, self.subproject)
+ except wrap.WrapException as e:
+ if not required:
+ mlog.log(e)
+ mlog.log('Subproject', mlog.bold(subp_name), 'is buildable:', mlog.red('NO'), '(disabling)')
+ return self.disabled_subproject(subp_name, exception=e)
+ raise e
+
+ subdir_abs = os.path.join(self.environment.get_source_dir(), subdir)
+ os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
+ self.global_args_frozen = True
+
+ stack = ':'.join(self.subproject_stack + [subp_name])
+ m = ['\nExecuting subproject', mlog.bold(stack)]
+ if method != 'meson':
+ m += ['method', mlog.bold(method)]
+ mlog.log(*m, '\n', nested=False)
+
+ try:
+ if method == 'meson':
+ return self._do_subproject_meson(subp_name, subdir, default_options, kwargs)
+ elif method == 'cmake':
+ return self._do_subproject_cmake(subp_name, subdir, subdir_abs, default_options, kwargs)
+ else:
+ raise mesonlib.MesonBugException(f'The method {method} is invalid for the subproject {subp_name}')
+ # Invalid code is always an error
+ except InvalidCode:
+ raise
+ except Exception as e:
+ if not required:
+ with mlog.nested(subp_name):
+ # Suppress the 'ERROR:' prefix because this exception is not
+ # fatal and VS CI treats any log containing "ERROR:" as fatal.
+ mlog.exception(e, prefix=mlog.yellow('Exception:'))
+ mlog.log('\nSubproject', mlog.bold(subdir), 'is buildable:', mlog.red('NO'), '(disabling)')
+ return self.disabled_subproject(subp_name, exception=e)
+ raise e
+
+ def _do_subproject_meson(self, subp_name: str, subdir: str, default_options, kwargs,
+ ast: T.Optional[mparser.CodeBlockNode] = None,
+ build_def_files: T.Optional[T.List[str]] = None,
+ is_translated: bool = False) -> SubprojectHolder:
+ with mlog.nested(subp_name):
+ new_build = self.build.copy()
+ subi = Interpreter(new_build, self.backend, subp_name, subdir, self.subproject_dir,
+ default_options, ast=ast, is_translated=is_translated)
+ # Those lists are shared by all interpreters. That means that
+ # even if the subproject fails, any modification that the subproject
+ # made to those lists will affect the parent project.
+ subi.subprojects = self.subprojects
+ subi.modules = self.modules
+ subi.holder_map = self.holder_map
+ subi.bound_holder_map = self.bound_holder_map
+ subi.summary = self.summary
+
+ subi.subproject_stack = self.subproject_stack + [subp_name]
+ current_active = self.active_projectname
+ current_warnings_counter = mlog.log_warnings_counter
+ mlog.log_warnings_counter = 0
+ subi.run()
+ subi_warnings = mlog.log_warnings_counter
+ mlog.log_warnings_counter = current_warnings_counter
+
+ mlog.log('Subproject', mlog.bold(subp_name), 'finished.')
+
+ mlog.log()
+
+ if 'version' in kwargs:
+ pv = subi.project_version
+ wanted = kwargs['version']
+ if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]:
+ raise InterpreterException(f'Subproject {subp_name} version is {pv} but {wanted} required.')
+ self.active_projectname = current_active
+ self.subprojects.update(subi.subprojects)
+ self.subprojects[subp_name] = SubprojectHolder(subi, subdir, warnings=subi_warnings)
+ # Duplicates are possible when a subproject uses files from the project root
+ if build_def_files:
+ self.build_def_files = list(set(self.build_def_files + build_def_files))
+ # We always need subi.build_def_files, to propagate files from sub-sub-projects
+ self.build_def_files = list(set(self.build_def_files + subi.build_def_files))
+ self.build.merge(subi.build)
+ self.build.subprojects[subp_name] = subi.project_version
+ self.coredata.initialized_subprojects.add(subp_name)
+ return self.subprojects[subp_name]
+
+ def _do_subproject_cmake(self, subp_name, subdir, subdir_abs, default_options, kwargs):
+ with mlog.nested(subp_name):
+ new_build = self.build.copy()
+ prefix = self.coredata.options[OptionKey('prefix')].value
+
+ from ..modules.cmake import CMakeSubprojectOptions
+ options = kwargs.get('options', CMakeSubprojectOptions())
+ if not isinstance(options, CMakeSubprojectOptions):
+ raise InterpreterException('"options" kwarg must be CMakeSubprojectOptions'
+ ' object (created by cmake.subproject_options())')
+
+ cmake_options = mesonlib.stringlistify(kwargs.get('cmake_options', []))
+ cmake_options += options.cmake_options
+ cm_int = CMakeInterpreter(new_build, Path(subdir), Path(subdir_abs), Path(prefix), new_build.environment, self.backend)
+ cm_int.initialise(cmake_options)
+ cm_int.analyse()
+
+ # Generate a meson ast and execute it with the normal do_subproject_meson
+ ast = cm_int.pretend_to_be_meson(options.target_options)
+
+ mlog.log()
+ with mlog.nested('cmake-ast'):
+ mlog.log('Processing generated meson AST')
+
+ # Debug print the generated meson file
+ from ..ast import AstIndentationGenerator, AstPrinter
+ printer = AstPrinter()
+ ast.accept(AstIndentationGenerator())
+ ast.accept(printer)
+ printer.post_process()
+ meson_filename = os.path.join(self.build.environment.get_build_dir(), subdir, 'meson.build')
+ with open(meson_filename, "w", encoding='utf-8') as f:
+ f.write(printer.result)
+
+ mlog.log('Build file:', meson_filename)
+ mlog.cmd_ci_include(meson_filename)
+ mlog.log()
+
+ result = self._do_subproject_meson(subp_name, subdir, default_options, kwargs, ast, cm_int.bs_files, is_translated=True)
+ result.cm_interpreter = cm_int
+
+ mlog.log()
+ return result
+
+ def get_option_internal(self, optname: str):
+ key = OptionKey.from_string(optname).evolve(subproject=self.subproject)
+
+ if not key.is_project():
+ for opts in [self.coredata.options, compilers.base_options]:
+ v = opts.get(key)
+ if v is None or v.yielding:
+ v = opts.get(key.as_root())
+ if v is not None:
+ return v
+
+ try:
+ opt = self.coredata.options[key]
+ if opt.yielding and key.subproject and key.as_root() in self.coredata.options:
+ popt = self.coredata.options[key.as_root()]
+ if type(opt) is type(popt):
+ opt = popt
+ else:
+ # Get class name, then option type as a string
+ opt_type = opt.__class__.__name__[4:][:-6].lower()
+ popt_type = popt.__class__.__name__[4:][:-6].lower()
+ # This is not a hard error to avoid dependency hell, the workaround
+ # when this happens is to simply set the subproject's option directly.
+ mlog.warning('Option {0!r} of type {1!r} in subproject {2!r} cannot yield '
+ 'to parent option of type {3!r}, ignoring parent value. '
+ 'Use -D{2}:{0}=value to set the value for this option manually'
+ '.'.format(optname, opt_type, self.subproject, popt_type),
+ location=self.current_node)
+ return opt
+ except KeyError:
+ pass
+
+ raise InterpreterException('Tried to access unknown option "%s".' % optname)
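+
+ # Hedged example of the yielding logic above: a subproject option 'foo'
+ # (invented name) declared with yield: true takes the parent project's
+ # value when the parent defines 'foo' with the same type; on a type
+ # mismatch the warning above is emitted and the subproject's own value
+ # is kept (override with -Dsubproject:foo=value).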
+
+ @stringArgs
+ @noKwargs
+ def func_get_option(self, nodes, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('Argument required for get_option.')
+ optname = args[0]
+ if ':' in optname:
+ raise InterpreterException('Having a colon in option name is forbidden, '
+ 'projects are not allowed to directly access '
+ 'options of other subprojects.')
+ opt = self.get_option_internal(optname)
+ if isinstance(opt, coredata.UserFeatureOption):
+ opt.name = optname
+ return opt
+ elif isinstance(opt, coredata.UserOption):
+ return opt.value
+ return opt
+
+ @noKwargs
+ def func_configuration_data(self, node, args, kwargs):
+ if len(args) > 1:
+ raise InterpreterException('configuration_data takes only one optional positional argument')
+ elif len(args) == 1:
+ FeatureNew.single_use('configuration_data dictionary', '0.49.0', self.subproject)
+ initial_values = args[0]
+ if not isinstance(initial_values, dict):
+ raise InterpreterException('configuration_data first argument must be a dictionary')
+ else:
+ initial_values = {}
+ return ConfigurationDataObject(self.subproject, initial_values)
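+
+ # Illustrative usage (key names are examples):
+ #
+ #   cdata = configuration_data({'HAVE_FOO': 1}) # dict form, 0.49.0
+ #   cdata.set('VERSION', meson.project_version())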
+
+ def set_backend(self):
+ # The backend is already set when parsing subprojects
+ if self.backend is not None:
+ return
+ backend = self.coredata.get_option(OptionKey('backend'))
+ from ..backend import backends
+ self.backend = backends.get_backend_from_name(backend, self.build, self)
+
+ if self.backend is None:
+ raise InterpreterException('Unknown backend "%s".' % backend)
+ if backend != self.backend.name:
+ if self.backend.name.startswith('vs'):
+ mlog.log('Auto detected Visual Studio backend:', mlog.bold(self.backend.name))
+ self.coredata.set_option(OptionKey('backend'), self.backend.name)
+
+ # Only init backend options on first invocation otherwise it would
+ # override values previously set from command line.
+ if self.environment.first_invocation:
+ self.coredata.init_backend_options(backend)
+
+ options = {k: v for k, v in self.environment.options.items() if k.is_backend()}
+ self.coredata.set_options(options)
+
+ @stringArgs
+ @permittedKwargs({'version', 'meson_version', 'default_options', 'license', 'subproject_dir'})
+ def func_project(self, node, args, kwargs):
+ if len(args) < 1:
+ raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.')
+ proj_name, *proj_langs = args
+ if ':' in proj_name:
+ raise InvalidArguments(f"Project name {proj_name!r} must not contain ':'")
+
+ # This needs to be evaluated as early as possible, as meson uses this
+ # for things like deprecation testing.
+ if 'meson_version' in kwargs:
+ cv = coredata.version
+ pv = kwargs['meson_version']
+ if not mesonlib.version_compare(cv, pv):
+ raise InterpreterException(f'Meson version is {cv} but project requires {pv}')
+ mesonlib.project_meson_versions[self.subproject] = kwargs['meson_version']
+
+ if os.path.exists(self.option_file):
+ oi = optinterpreter.OptionInterpreter(self.subproject)
+ oi.process(self.option_file)
+ self.coredata.update_project_options(oi.options)
+ self.add_build_def_file(self.option_file)
+
+ # Do not set default_options on reconfigure otherwise it would override
+ # values previously set from command line. That means that changing
+ # default_options in a project will trigger a reconfigure but won't
+ # have any effect.
+ self.project_default_options = mesonlib.stringlistify(kwargs.get('default_options', []))
+ self.project_default_options = coredata.create_options_dict(self.project_default_options, self.subproject)
+
+ # If this is the first invocation we always need to initialize
+ # builtins. If this is a subproject that is new in a re-invocation,
+ # we need to initialize builtins for it as well.
+ if self.environment.first_invocation or (self.subproject != '' and self.subproject not in self.coredata.initialized_subprojects):
+ default_options = self.project_default_options.copy()
+ default_options.update(self.default_project_options)
+ self.coredata.init_builtins(self.subproject)
+ else:
+ default_options = {}
+ self.coredata.set_default_options(default_options, self.subproject, self.environment)
+
+ if not self.is_subproject():
+ self.build.project_name = proj_name
+ self.active_projectname = proj_name
+ version = kwargs.get('version', 'undefined')
+ if isinstance(version, list):
+ if len(version) != 1:
+ raise InvalidCode('Version argument is an array with more than one entry.')
+ version = version[0]
+ if isinstance(version, mesonlib.File):
+ FeatureNew.single_use('version from file', '0.57.0', self.subproject)
+ self.add_build_def_file(version)
+ ifname = version.absolute_path(self.environment.source_dir,
+ self.environment.build_dir)
+ try:
+ ver_data = Path(ifname).read_text(encoding='utf-8').split('\n')
+ except FileNotFoundError:
+ raise InterpreterException('Version file not found.')
+ if len(ver_data) == 2 and ver_data[1] == '':
+ ver_data = ver_data[0:1]
+ if len(ver_data) != 1:
+ raise InterpreterException('Version file must contain exactly one line of text.')
+ self.project_version = ver_data[0]
+ elif isinstance(version, str):
+ self.project_version = version
+ else:
+ raise InvalidCode('The version keyword argument must be a string or a file.')
+ if self.build.project_version is None:
+ self.build.project_version = self.project_version
+ proj_license = mesonlib.stringlistify(kwargs.get('license', 'unknown'))
+ self.build.dep_manifest[proj_name] = {'version': self.project_version,
+ 'license': proj_license}
+ if self.subproject in self.build.projects:
+ raise InvalidCode('Second call to project().')
+
+ # spdirname is the subproject_dir for this project, relative to self.subdir.
+ # self.subproject_dir is the subproject_dir for the main project, relative to top source dir.
+ spdirname = kwargs.get('subproject_dir')
+ if spdirname:
+ if not isinstance(spdirname, str):
+ raise InterpreterException('Subproject_dir must be a string')
+ if os.path.isabs(spdirname):
+ raise InterpreterException('Subproject_dir must not be an absolute path.')
+ if spdirname.startswith('.'):
+ raise InterpreterException('Subproject_dir must not begin with a period.')
+ if '..' in spdirname:
+ raise InterpreterException('Subproject_dir must not contain a ".." segment.')
+ if not self.is_subproject():
+ self.subproject_dir = spdirname
+ else:
+ spdirname = 'subprojects'
+ self.build.subproject_dir = self.subproject_dir
+
+ # Load wrap files from this (sub)project.
+ wrap_mode = self.coredata.get_option(OptionKey('wrap_mode'))
+ if not self.is_subproject() or wrap_mode != WrapMode.nopromote:
+ subdir = os.path.join(self.subdir, spdirname)
+ r = wrap.Resolver(self.environment.get_source_dir(), subdir, wrap_mode)
+ if self.is_subproject():
+ self.environment.wrap_resolver.merge_wraps(r)
+ else:
+ self.environment.wrap_resolver = r
+
+ self.build.projects[self.subproject] = proj_name
+ mlog.log('Project name:', mlog.bold(proj_name))
+ mlog.log('Project version:', mlog.bold(self.project_version))
+
+ self.add_languages(proj_langs, True, MachineChoice.HOST)
+ self.add_languages(proj_langs, False, MachineChoice.BUILD)
+
+ self.set_backend()
+ if not self.is_subproject():
+ self.check_stdlibs()
+
+ @FeatureNewKwargs('add_languages', '0.54.0', ['native'])
+ @permittedKwargs({'required', 'native'})
+ @stringArgs
+ def func_add_languages(self, node, args, kwargs):
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+ if disabled:
+ for lang in sorted(args, key=compilers.sort_clink):
+ mlog.log('Compiler for language', mlog.bold(lang), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return False
+ if 'native' in kwargs:
+ return self.add_languages(args, required, self.machine_from_native_kwarg(kwargs))
+ else:
+ # absent 'native' means 'both' for backwards compatibility
+ tv = FeatureNew.get_target_version(self.subproject)
+ if FeatureNew.check_version(tv, '0.54.0'):
+ mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.',
+ location=self.current_node)
+
+ success = self.add_languages(args, False, MachineChoice.BUILD)
+ success &= self.add_languages(args, required, MachineChoice.HOST)
+ return success
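+
+ # Illustrative meson.build calls handled above:
+ #
+ #   add_languages('cpp', native: false)             # host machine only
+ #   ok = add_languages('fortran', required: false)  # false if not found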
+
+ @noArgsFlattening
+ @noKwargs
+ def func_message(self, node, args, kwargs):
+ if len(args) > 1:
+ FeatureNew.single_use('message with more than one argument', '0.54.0', self.subproject)
+ args_str = [stringifyUserArguments(i) for i in args]
+ self.message_impl(args_str)
+
+ def message_impl(self, args):
+ mlog.log(mlog.bold('Message:'), *args)
+
+ @noArgsFlattening
+ @FeatureNewKwargs('summary', '0.54.0', ['list_sep'])
+ @permittedKwargs({'section', 'bool_yn', 'list_sep'})
+ @FeatureNew('summary', '0.53.0')
+ def func_summary(self, node, args, kwargs):
+ if len(args) == 1:
+ if not isinstance(args[0], dict):
+ raise InterpreterException('Summary first argument must be a dictionary.')
+ values = args[0]
+ elif len(args) == 2:
+ if not isinstance(args[0], str):
+ raise InterpreterException('Summary first argument must be a string.')
+ values = {args[0]: args[1]}
+ else:
+ raise InterpreterException('Summary accepts at most 2 arguments.')
+ section = kwargs.get('section', '')
+ if not isinstance(section, str):
+ raise InterpreterException('Summary\'s section keyword argument must be a string.')
+ self.summary_impl(section, values, kwargs)
+
+ def summary_impl(self, section, values, kwargs):
+ if self.subproject not in self.summary:
+ self.summary[self.subproject] = Summary(self.active_projectname, self.project_version)
+ self.summary[self.subproject].add_section(section, values, kwargs, self.subproject)
+
+ def _print_summary(self):
+ # Add automatic 'Subprojects' section in main project.
+ all_subprojects = collections.OrderedDict()
+ for name, subp in sorted(self.subprojects.items()):
+ value = subp.found()
+ if subp.disabled_feature:
+ value = [value, f'Feature {subp.disabled_feature!r} disabled']
+ elif subp.exception:
+ value = [value, str(subp.exception)]
+ elif subp.warnings > 0:
+ value = [value, f'{subp.warnings} warnings']
+ all_subprojects[name] = value
+ if all_subprojects:
+ self.summary_impl('Subprojects', all_subprojects,
+ {'bool_yn': True,
+ 'list_sep': ' ',
+ })
+ # Print all summaries, main project last.
+ mlog.log('') # newline
+ main_summary = self.summary.pop('', None)
+ for subp_name, summary in sorted(self.summary.items()):
+ if self.subprojects[subp_name].found():
+ summary.dump()
+ if main_summary:
+ main_summary.dump()
+
+ @noArgsFlattening
+ @FeatureNew('warning', '0.44.0')
+ @noKwargs
+ def func_warning(self, node, args, kwargs):
+ if len(args) > 1:
+ FeatureNew.single_use('warning with more than one argument', '0.54.0', self.subproject)
+ args_str = [stringifyUserArguments(i) for i in args]
+ mlog.warning(*args_str, location=node)
+
+ @noArgsFlattening
+ @noKwargs
+ def func_error(self, node, args, kwargs):
+ if len(args) > 1:
+ FeatureNew.single_use('error with more than one argument', '0.58.0', self.subproject)
+ args_str = [stringifyUserArguments(i) for i in args]
+ raise InterpreterException('Problem encountered: ' + ' '.join(args_str))
+
+ @noKwargs
+ @noPosargs
+ def func_exception(self, node, args, kwargs):
+ raise Exception()
+
+ def add_languages(self, args: T.Sequence[str], required: bool, for_machine: MachineChoice) -> bool:
+ success = self.add_languages_for(args, required, for_machine)
+ if not self.coredata.is_cross_build():
+ self.coredata.copy_build_options_from_regular_ones()
+ self._redetect_machines()
+ return success
+
+ def should_skip_sanity_check(self, for_machine: MachineChoice) -> bool:
+ should = self.environment.properties.host.get('skip_sanity_check', False)
+ if not isinstance(should, bool):
+ raise InterpreterException('Option skip_sanity_check must be a boolean.')
+ if for_machine != MachineChoice.HOST and not should:
+ return False
+ if not self.environment.is_cross_build() and not should:
+ return False
+ return should
+
+ def add_languages_for(self, args: T.List[str], required: bool, for_machine: MachineChoice) -> bool:
+ args = [a.lower() for a in args]
+ langs = set(self.coredata.compilers[for_machine].keys())
+ langs.update(args)
+ if ('vala' in langs or 'cython' in langs) and 'c' not in langs:
+ if 'vala' in langs:
+ FeatureNew.single_use('Adding Vala language without C', '0.59.0', self.subproject)
+ args.append('c')
+
+ success = True
+ for lang in sorted(args, key=compilers.sort_clink):
+ clist = self.coredata.compilers[for_machine]
+ machine_name = for_machine.get_lower_case_name()
+ if lang in clist:
+ comp = clist[lang]
+ else:
+ try:
+ comp = compilers.detect_compiler_for(self.environment, lang, for_machine)
+ if comp is None:
+ raise InvalidArguments('Tried to use unknown language "%s".' % lang)
+ if self.should_skip_sanity_check(for_machine):
+ mlog.log_once('Cross compiler sanity tests disabled via the cross file.')
+ else:
+ comp.sanity_check(self.environment.get_scratch_dir(), self.environment)
+ except Exception:
+ if not required:
+ mlog.log('Compiler for language',
+ mlog.bold(lang), 'for the', machine_name,
+ 'machine not found.')
+ success = False
+ continue
+ else:
+ raise
+
+ if for_machine == MachineChoice.HOST or self.environment.is_cross_build():
+ logger_fun = mlog.log
+ else:
+ logger_fun = mlog.debug
+ logger_fun(comp.get_display_language(), 'compiler for the', machine_name, 'machine:',
+ mlog.bold(' '.join(comp.get_exelist())), comp.get_version_string())
+ if comp.linker is not None:
+ logger_fun(comp.get_display_language(), 'linker for the', machine_name, 'machine:',
+ mlog.bold(' '.join(comp.linker.get_exelist())), comp.linker.id, comp.linker.version)
+ self.build.ensure_static_linker(comp)
+
+ return success
+
+ def program_from_file_for(self, for_machine, prognames):
+ for p in prognames:
+ if isinstance(p, mesonlib.File):
+ continue # Always points to a local (i.e. self-generated) file.
+ if not isinstance(p, str):
+ raise InterpreterException('Executable name must be a string')
+ prog = ExternalProgram.from_bin_list(self.environment, for_machine, p)
+ if prog.found():
+ return prog
+ return None
+
+ def program_from_system(self, args, search_dirs, extra_info):
+ # Search for scripts relative to current subdir.
+ # Do not cache found programs because find_program('foobar')
+ # might give different results when run from different source dirs.
+ source_dir = os.path.join(self.environment.get_source_dir(), self.subdir)
+ for exename in args:
+ if isinstance(exename, mesonlib.File):
+ if exename.is_built:
+ search_dir = os.path.join(self.environment.get_build_dir(),
+ exename.subdir)
+ else:
+ search_dir = os.path.join(self.environment.get_source_dir(),
+ exename.subdir)
+ exename = exename.fname
+ extra_search_dirs = []
+ elif isinstance(exename, str):
+ search_dir = source_dir
+ extra_search_dirs = search_dirs
+ else:
+ raise InvalidArguments(f'find_program only accepts strings and files, not {exename!r}')
+ extprog = ExternalProgram(exename, search_dir=search_dir,
+ extra_search_dirs=extra_search_dirs,
+ silent=True)
+ if extprog.found():
+ extra_info.append(f"({' '.join(extprog.get_command())})")
+ return extprog
+
+ def program_from_overrides(self, command_names, extra_info):
+ for name in command_names:
+ if not isinstance(name, str):
+ continue
+ if name in self.build.find_overrides:
+ exe = self.build.find_overrides[name]
+ extra_info.append(mlog.blue('(overridden)'))
+ return exe
+ return None
+
+ def store_name_lookups(self, command_names):
+ for name in command_names:
+ if isinstance(name, str):
+ self.build.searched_programs.add(name)
+
+ def add_find_program_override(self, name, exe):
+ if name in self.build.searched_programs:
+ raise InterpreterException(f'Tried to override finding of executable "{name}" which has already been found.')
+ if name in self.build.find_overrides:
+ raise InterpreterException(f'Tried to override executable "{name}" which has already been overridden.')
+ self.build.find_overrides[name] = exe
+
+ def notfound_program(self, args):
+ return NonExistingExternalProgram(' '.join(args))
+
+ # TODO: update modules to always pass `for_machine`. It is bad form to
+ # assume the host machine.
+ def find_program_impl(self, args, for_machine: MachineChoice = MachineChoice.HOST,
+ required=True, silent=True, wanted='', search_dirs=None,
+ version_func=None):
+ args = mesonlib.listify(args)
+
+ extra_info = []
+ progobj = self.program_lookup(args, for_machine, required, search_dirs, extra_info)
+ if progobj is None:
+ progobj = self.notfound_program(args)
+
+ if isinstance(progobj, ExternalProgram) and not progobj.found():
+ mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'))
+ if required:
+ m = 'Program {!r} not found'
+ raise InterpreterException(m.format(progobj.get_name()))
+ return progobj
+
+ if wanted:
+ if version_func:
+ version = version_func(progobj)
+ elif isinstance(progobj, build.Executable):
+ interp = self
+ if progobj.subproject:
+ interp = self.subprojects[progobj.subproject].held_object
+ assert isinstance(interp, Interpreter)
+ version = interp.project_version
+ elif isinstance(progobj, ExternalProgram):
+ version = progobj.get_version(self)
+ is_found, not_found, found = mesonlib.version_compare_many(version, wanted)
+ if not is_found:
+ mlog.log('Program', mlog.bold(progobj.name), 'found:', mlog.red('NO'),
+ 'found', mlog.normal_cyan(version), 'but need:',
+ mlog.bold(', '.join([f"'{e}'" for e in not_found])), *extra_info)
+ if required:
+ m = 'Invalid version of program {!r}: need {!r}, found {!r}.'
+ raise InterpreterException(m.format(progobj.name, not_found, version))
+ return self.notfound_program(args)
+ extra_info.insert(0, mlog.normal_cyan(version))
+
+ # Only store successful lookups
+ self.store_name_lookups(args)
+ mlog.log('Program', mlog.bold(progobj.name), 'found:', mlog.green('YES'), *extra_info)
+ if isinstance(progobj, build.Executable):
+ progobj.was_returned_by_find_program = True
+ return progobj
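+
+ # Illustrative lookup with a version constraint (the program name is
+ # just an example; full_path() assumed available, 0.55.0+):
+ #
+ #   prog = find_program('python3', version: '>=3.6', required: false)
+ #   if prog.found()
+ #     message(prog.full_path())
+ #   endif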
+
+ def program_lookup(self, args, for_machine, required, search_dirs, extra_info):
+ progobj = self.program_from_overrides(args, extra_info)
+ if progobj:
+ return progobj
+
+ fallback = None
+ wrap_mode = self.coredata.get_option(OptionKey('wrap_mode'))
+ if wrap_mode != WrapMode.nofallback and self.environment.wrap_resolver:
+ fallback = self.environment.wrap_resolver.find_program_provider(args)
+ if fallback and wrap_mode == WrapMode.forcefallback:
+ return self.find_program_fallback(fallback, args, required, extra_info)
+
+ progobj = self.program_from_file_for(for_machine, args)
+ if progobj is None:
+ progobj = self.program_from_system(args, search_dirs, extra_info)
+ if progobj is None and args[0].endswith('python3'):
+ prog = ExternalProgram('python3', mesonlib.python_command, silent=True)
+ progobj = prog if prog.found() else None
+ if progobj is None and fallback and required:
+ progobj = self.find_program_fallback(fallback, args, required, extra_info)
+
+ return progobj
+
+ def find_program_fallback(self, fallback, args, required, extra_info):
+ mlog.log('Fallback to subproject', mlog.bold(fallback), 'which provides program',
+ mlog.bold(' '.join(args)))
+ sp_kwargs = {'required': required}
+ self.do_subproject(fallback, 'meson', sp_kwargs)
+ return self.program_from_overrides(args, extra_info)
+
+ @FeatureNewKwargs('find_program', '0.53.0', ['dirs'])
+ @FeatureNewKwargs('find_program', '0.52.0', ['version'])
+ @FeatureNewKwargs('find_program', '0.49.0', ['disabler'])
+ @disablerIfNotFound
+ @permittedKwargs({'required', 'native', 'version', 'dirs'})
+ def func_find_program(self, node, args, kwargs):
+ if not args:
+ raise InterpreterException('No program name specified.')
+
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+ if disabled:
+ mlog.log('Program', mlog.bold(' '.join(args)), 'skipped: feature', mlog.bold(feature), 'disabled')
+ return self.notfound_program(args)
+
+ search_dirs = extract_search_dirs(kwargs)
+ wanted = mesonlib.stringlistify(kwargs.get('version', []))
+ for_machine = self.machine_from_native_kwarg(kwargs)
+ return self.find_program_impl(args, for_machine, required=required,
+ silent=False, wanted=wanted,
+ search_dirs=search_dirs)
+
+ def func_find_library(self, node, args, kwargs):
+ raise InvalidCode('find_library() is removed, use meson.get_compiler(\'name\').find_library() instead.\n'
+ 'Look here for documentation: http://mesonbuild.com/Reference-manual.html#compiler-object\n'
+ 'Look here for example: http://mesonbuild.com/howtox.html#add-math-library-lm-portably\n'
+ )
+
+ # When adding kwargs, please check if they make sense in dependencies.get_dep_identifier()
+ @FeatureNewKwargs('dependency', '0.57.0', ['cmake_package_version'])
+ @FeatureNewKwargs('dependency', '0.56.0', ['allow_fallback'])
+ @FeatureNewKwargs('dependency', '0.54.0', ['components'])
+ @FeatureNewKwargs('dependency', '0.52.0', ['include_type'])
+ @FeatureNewKwargs('dependency', '0.50.0', ['not_found_message', 'cmake_module_path', 'cmake_args'])
+ @FeatureNewKwargs('dependency', '0.49.0', ['disabler'])
+ @FeatureNewKwargs('dependency', '0.40.0', ['method'])
+ @FeatureNewKwargs('dependency', '0.38.0', ['default_options'])
+ @disablerIfNotFound
+ @permittedKwargs(permitted_dependency_kwargs)
+ @typed_pos_args('dependency', str)
+ def func_dependency(self, node, args, kwargs):
+ # Replace '' by empty list of names
+ names = [args[0]] if args[0] else []
+ allow_fallback = kwargs.get('allow_fallback')
+ if allow_fallback is not None and not isinstance(allow_fallback, bool):
+ raise InvalidArguments('"allow_fallback" argument must be boolean')
+ fallback = kwargs.get('fallback')
+ default_options = kwargs.get('default_options')
+ df = DependencyFallbacksHolder(self, names, allow_fallback, default_options)
+ df.set_fallback(fallback)
+ not_found_message = kwargs.get('not_found_message', '')
+ if not isinstance(not_found_message, str):
+ raise InvalidArguments('The not_found_message must be a string.')
+ try:
+ d = df.lookup(kwargs)
+ except Exception:
+ if not_found_message:
+ self.message_impl([not_found_message])
+ raise
+ assert isinstance(d, Dependency)
+ if not d.found() and not_found_message:
+ self.message_impl([not_found_message])
+ # Ensure the correct include type
+ if 'include_type' in kwargs:
+ wanted = kwargs['include_type']
+ if not isinstance(wanted, str):
+ raise InvalidArguments('The `include_type` kwarg must be a string')
+ actual = d.get_include_type()
+ if wanted != actual:
+ mlog.debug(f'Current include type of {args[0]} is {actual}. Converting to requested {wanted}')
+ d = d.generate_system_dependency(wanted)
+ return d
+
+ @FeatureNew('disabler', '0.44.0')
+ @noKwargs
+ @noPosargs
+ def func_disabler(self, node, args, kwargs):
+ return Disabler()
+
+ @FeatureNewKwargs('executable', '0.42.0', ['implib'])
+ @FeatureNewKwargs('executable', '0.56.0', ['win_subsystem'])
+ @FeatureDeprecatedKwargs('executable', '0.56.0', ['gui_app'], extra_message="Use 'win_subsystem' instead.")
+ @permittedKwargs(build.known_exe_kwargs)
+ def func_executable(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, build.Executable)
+
+ @permittedKwargs(build.known_stlib_kwargs)
+ def func_static_lib(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, build.StaticLibrary)
+
+ @permittedKwargs(build.known_shlib_kwargs)
+ def func_shared_lib(self, node, args, kwargs):
+ holder = self.build_target(node, args, kwargs, build.SharedLibrary)
+ holder.shared_library_only = True
+ return holder
+
+ @permittedKwargs(known_library_kwargs)
+ def func_both_lib(self, node, args, kwargs):
+ return self.build_both_libraries(node, args, kwargs)
+
+ @FeatureNew('shared_module', '0.37.0')
+ @permittedKwargs(build.known_shmod_kwargs)
+ def func_shared_module(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, build.SharedModule)
+
+ @permittedKwargs(known_library_kwargs)
+ def func_library(self, node, args, kwargs):
+ return self.build_library(node, args, kwargs)
+
+ @permittedKwargs(build.known_jar_kwargs)
+ def func_jar(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, build.Jar)
+
+ @FeatureNewKwargs('build_target', '0.40.0', ['link_whole', 'override_options'])
+ @permittedKwargs(known_build_target_kwargs)
+ def func_build_target(self, node, args, kwargs):
+ if 'target_type' not in kwargs:
+ raise InterpreterException('Missing target_type keyword argument')
+ target_type = kwargs.pop('target_type')
+ if target_type == 'executable':
+ return self.build_target(node, args, kwargs, build.Executable)
+ elif target_type == 'shared_library':
+ return self.build_target(node, args, kwargs, build.SharedLibrary)
+ elif target_type == 'shared_module':
+ FeatureNew('build_target(target_type: \'shared_module\')',
+ '0.51.0').use(self.subproject)
+ return self.build_target(node, args, kwargs, build.SharedModule)
+ elif target_type == 'static_library':
+ return self.build_target(node, args, kwargs, build.StaticLibrary)
+ elif target_type == 'both_libraries':
+ return self.build_both_libraries(node, args, kwargs)
+ elif target_type == 'library':
+ return self.build_library(node, args, kwargs)
+ elif target_type == 'jar':
+ return self.build_target(node, args, kwargs, build.Jar)
+ else:
+ raise InterpreterException('Unknown target_type.')
+
+ @permittedKwargs({'input', 'output', 'fallback', 'command', 'replace_string'})
+ @FeatureDeprecatedKwargs('custom_target', '0.47.0', ['build_always'],
+ 'combine build_by_default and build_always_stale instead.')
+ @noPosargs
+ def func_vcs_tag(self, node, args, kwargs):
+ if 'input' not in kwargs or 'output' not in kwargs:
+ raise InterpreterException('Keyword arguments input and output must exist')
+ if 'fallback' not in kwargs:
+ FeatureNew.single_use('Optional fallback in vcs_tag', '0.41.0', self.subproject)
+ fallback = kwargs.pop('fallback', self.project_version)
+ if not isinstance(fallback, str):
+ raise InterpreterException('Keyword argument fallback must be a string.')
+ replace_string = kwargs.pop('replace_string', '@VCS_TAG@')
+ regex_selector = '(.*)' # default regex selector for custom command: use complete output
+ vcs_cmd = kwargs.get('command', None)
+ if vcs_cmd and not isinstance(vcs_cmd, list):
+ vcs_cmd = [vcs_cmd]
+ source_dir = os.path.normpath(os.path.join(self.environment.get_source_dir(), self.subdir))
+ if vcs_cmd:
+ # Is the command an executable in path or maybe a script in the source tree?
+ vcs_cmd[0] = shutil.which(vcs_cmd[0]) or os.path.join(source_dir, vcs_cmd[0])
+ else:
+ vcs = mesonlib.detect_vcs(source_dir)
+ if vcs:
+ mlog.log('Found {} repository at {}'.format(vcs['name'], vcs['wc_dir']))
+ vcs_cmd = vcs['get_rev'].split()
+ regex_selector = vcs['rev_regex']
+ else:
+ vcs_cmd = [' '] # executing this cmd will fail in vcstagger.py and force use of the fallback string
+ # vcstagger.py parameters: infile, outfile, fallback, source_dir, replace_string, regex_selector, command...
+ kwargs['command'] = self.environment.get_build_command() + \
+ ['--internal',
+ 'vcstagger',
+ '@INPUT0@',
+ '@OUTPUT0@',
+ fallback,
+ source_dir,
+ replace_string,
+ regex_selector] + vcs_cmd
+ kwargs.setdefault('build_by_default', True)
+ kwargs.setdefault('build_always_stale', True)
+ return self._func_custom_target_impl(node, [kwargs['output']], kwargs)
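+
+ # Illustrative usage of the wrapper above (file names are hypothetical):
+ #
+ #   vcs_tag(input: 'version.h.in', output: 'version.h',
+ #     fallback: meson.project_version(),
+ #     replace_string: '@VCS_TAG@')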
+
+ @FeatureNew('subdir_done', '0.46.0')
+ @noPosargs
+ @noKwargs
+ def func_subdir_done(self, node, args, kwargs):
+ raise SubdirDoneRequest()
+
+ @stringArgs
+ @FeatureNewKwargs('custom_target', '0.57.0', ['env'])
+ @FeatureNewKwargs('custom_target', '0.48.0', ['console'])
+ @FeatureNewKwargs('custom_target', '0.47.0', ['install_mode', 'build_always_stale'])
+ @FeatureNewKwargs('custom_target', '0.40.0', ['build_by_default'])
+ @FeatureNewKwargs('custom_target', '0.59.0', ['feed'])
+ @permittedKwargs({'input', 'output', 'command', 'install', 'install_dir', 'install_mode',
+ 'build_always', 'capture', 'depends', 'depend_files', 'depfile',
+ 'build_by_default', 'build_always_stale', 'console', 'env',
+ 'feed'})
+ def func_custom_target(self, node, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('custom_target: Only one positional argument is allowed, and it must be a string name')
+ if 'depfile' in kwargs and ('@BASENAME@' in kwargs['depfile'] or '@PLAINNAME@' in kwargs['depfile']):
+ FeatureNew.single_use('substitutions in custom_target depfile', '0.47.0', self.subproject)
+ return self._func_custom_target_impl(node, args, kwargs)
+
+ def _func_custom_target_impl(self, node, args, kwargs):
+ 'Implementation-only, without FeatureNew checks, for internal use'
+ name = args[0]
+ kwargs['install_mode'] = self._get_kwarg_install_mode(kwargs)
+ if 'input' in kwargs:
+ try:
+ kwargs['input'] = self.source_strings_to_files(extract_as_list(kwargs, 'input'))
+ except mesonlib.MesonException:
+ mlog.warning('''Custom target input \'%s\' can\'t be converted to File object(s).
+This will become a hard error in the future.''' % kwargs['input'], location=self.current_node)
+ kwargs['env'] = self.unpack_env_kwarg(kwargs)
+ if 'command' in kwargs and isinstance(kwargs['command'], list) and kwargs['command']:
+ if isinstance(kwargs['command'][0], str):
+ kwargs['command'][0] = self.func_find_program(node, kwargs['command'][0], {})
+ tg = build.CustomTarget(name, self.subdir, self.subproject, kwargs, backend=self.backend)
+ self.add_target(name, tg)
+ return tg
+
+ @FeatureNewKwargs('run_target', '0.57.0', ['env'])
+ @permittedKwargs({'command', 'depends', 'env'})
+ def func_run_target(self, node, args, kwargs):
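+        # Illustrative meson.build usage:
+        #     run_target('lint',
+        #                command: [find_program('flake8'), meson.current_source_dir()])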
+ if len(args) > 1:
+ raise InvalidCode('Run_target takes only one positional argument: the target name.')
+ elif len(args) == 1:
+ if 'command' not in kwargs:
+ raise InterpreterException('Missing "command" keyword argument')
+ all_args = extract_as_list(kwargs, 'command')
+ deps = extract_as_list(kwargs, 'depends')
+ else:
+ raise InterpreterException('Run_target needs at least one positional argument.')
+
+ cleaned_args = []
+ for i in listify(all_args):
+ if not isinstance(i, (str, build.BuildTarget, build.CustomTarget, ExternalProgram, mesonlib.File)):
+ mlog.debug('Wrong type:', str(i))
+ raise InterpreterException('Invalid argument to run_target.')
+ if isinstance(i, ExternalProgram) and not i.found():
+ raise InterpreterException(f'Tried to use non-existing executable {i.name!r}')
+ cleaned_args.append(i)
+ if isinstance(cleaned_args[0], str):
+ cleaned_args[0] = self.func_find_program(node, cleaned_args[0], {})
+ name = args[0]
+ if not isinstance(name, str):
+ raise InterpreterException('First argument must be a string.')
+ cleaned_deps = []
+ for d in deps:
+ if not isinstance(d, (build.BuildTarget, build.CustomTarget)):
+ raise InterpreterException('Depends items must be build targets.')
+ cleaned_deps.append(d)
+ env = self.unpack_env_kwarg(kwargs)
+ tg = build.RunTarget(name, cleaned_args, cleaned_deps, self.subdir, self.subproject, env)
+ self.add_target(name, tg)
+ full_name = (self.subproject, name)
+        assert full_name not in self.build.run_target_names
+ self.build.run_target_names.add(full_name)
+ return tg
+
+ @FeatureNew('alias_target', '0.52.0')
+ @noKwargs
+ def func_alias_target(self, node, args, kwargs):
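+        # Illustrative meson.build usage; `exe` and `lib` are hypothetical
+        # build targets defined earlier:
+        #     alias_target('bootstrap', exe, lib)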
+ if len(args) < 2:
+ raise InvalidCode('alias_target takes at least 2 arguments.')
+ name = args[0]
+ if not isinstance(name, str):
+ raise InterpreterException('First argument must be a string.')
+ deps = listify(args[1:])
+ for d in deps:
+ if not isinstance(d, (build.BuildTarget, build.CustomTarget)):
+ raise InterpreterException('Depends items must be build targets.')
+ tg = build.AliasTarget(name, deps, self.subdir, self.subproject)
+ self.add_target(name, tg)
+ return tg
+
+ @permittedKwargs({'arguments', 'output', 'depends', 'depfile', 'capture',
+ 'preserve_path_from'})
+ @typed_pos_args('generator', (build.Executable, ExternalProgram))
+ @typed_kwargs(
+ 'generator',
+ KwargInfo('arguments', ContainerTypeInfo(list, str, allow_empty=False), required=True, listify=True),
+ KwargInfo('output', ContainerTypeInfo(list, str, allow_empty=False), required=True, listify=True),
+        KwargInfo('depfile', str, validator=lambda x: 'Depfile must be a plain filename without a subdirectory' if has_path_sep(x) else None),
+ KwargInfo('capture', bool, default=False, since='0.43.0'),
+ KwargInfo('depends', ContainerTypeInfo(list, (build.BuildTarget, build.CustomTarget)), default=[], listify=True),
+ )
+ def func_generator(self, node: mparser.FunctionNode,
+ args: T.Tuple[T.Union[build.Executable, ExternalProgram]],
+ kwargs: 'kwargs.FuncGenerator') -> build.Generator:
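+        # Illustrative meson.build usage; `comp` is a hypothetical generator
+        # executable:
+        #     gen = generator(comp, output: '@BASENAME@.c',
+        #                     arguments: ['@INPUT@', '@OUTPUT@'])
+        #     gen_src = gen.process('input.dat')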
+ for rule in kwargs['output']:
+ if '@BASENAME@' not in rule and '@PLAINNAME@' not in rule:
+ raise InvalidArguments('Every element of "output" must contain @BASENAME@ or @PLAINNAME@.')
+ if has_path_sep(rule):
+ raise InvalidArguments('"output" must not contain a directory separator.')
+ if len(kwargs['output']) > 1:
+ for o in kwargs['output']:
+ if '@OUTPUT@' in o:
+ raise InvalidArguments('Tried to use @OUTPUT@ in a rule with more than one output.')
+
+ gen = build.Generator(args[0], **kwargs)
+ self.generators.append(gen)
+ return gen
+
+ @typed_pos_args('benchmark', str, (build.Executable, build.Jar, ExternalProgram, mesonlib.File))
+ @typed_kwargs('benchmark', *TEST_KWARGS)
+ def func_benchmark(self, node: mparser.BaseNode,
+ args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
+ kwargs: 'kwargs.FuncBenchmark') -> None:
+ self.add_test(node, args, kwargs, False)
+
+ @typed_pos_args('test', str, (build.Executable, build.Jar, ExternalProgram, mesonlib.File))
+    @typed_kwargs('test', *TEST_KWARGS, KwargInfo('is_parallel', bool, default=True))
+ def func_test(self, node: mparser.BaseNode,
+ args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
+ kwargs: 'kwargs.FuncTest') -> None:
+ self.add_test(node, args, kwargs, True)
+
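+    # The `env` kwarg accepted by several DSL functions comes in three shapes;
+    # equivalent meson.build forms (illustrative):
+    #     env: environment({'FOO': 'bar'})
+    #     env: {'FOO': 'bar'}   # dict form, 0.52.0+
+    #     env: ['FOO=bar']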
+    def unpack_env_kwarg(self, kwargs: T.Dict[str, T.Any]) -> build.EnvironmentVariables:
+        envlist = kwargs.get('env', EnvironmentVariablesObject())
+        if isinstance(envlist, EnvironmentVariablesObject):
+            env = envlist.vars
+        elif isinstance(envlist, dict):
+            FeatureNew.single_use('environment dictionary', '0.52.0', self.subproject)
+            env = EnvironmentVariablesObject(envlist).vars
+        else:
+            # Convert from an array of 'key=value' strings to an environment object
+            env = EnvironmentVariablesObject(envlist).vars
+        return env
+
+ def make_test(self, node: mparser.BaseNode,
+ args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
+ kwargs: 'kwargs.BaseTest') -> Test:
+ name = args[0]
+ if ':' in name:
+ mlog.deprecation(f'":" is not allowed in test name "{name}", it has been replaced with "_"',
+ location=node)
+ name = name.replace(':', '_')
+ exe = args[1]
+ if isinstance(exe, mesonlib.File):
+ exe = self.func_find_program(node, args[1], {})
+
+ env = self.unpack_env_kwarg(kwargs)
+
+ if kwargs['timeout'] <= 0:
+ FeatureNew.single_use('test() timeout <= 0', '0.57.0', self.subproject)
+
+ prj = self.subproject if self.is_subproject() else self.build.project_name
+
+ suite: T.List[str] = []
+ for s in kwargs['suite']:
+ if s:
+ s = ':' + s
+ suite.append(prj.replace(' ', '_').replace(':', '_') + s)
+
+ return Test(name,
+ prj,
+ suite,
+ exe,
+ kwargs['depends'],
+ kwargs.get('is_parallel', False),
+ kwargs['args'],
+ env,
+ kwargs['should_fail'],
+ kwargs['timeout'],
+ kwargs['workdir'],
+ kwargs['protocol'],
+ kwargs['priority'])
+
+ def add_test(self, node: mparser.BaseNode, args: T.List, kwargs: T.Dict[str, T.Any], is_base_test: bool):
+ t = self.make_test(node, args, kwargs)
+ if is_base_test:
+ self.build.tests.append(t)
+ mlog.debug('Adding test', mlog.bold(t.name, True))
+ else:
+ self.build.benchmarks.append(t)
+ mlog.debug('Adding benchmark', mlog.bold(t.name, True))
+
+ @typed_pos_args('install_headers', varargs=(str, mesonlib.File))
+ @typed_kwargs(
+ 'install_headers',
+ KwargInfo('install_dir', (str, None)),
+ KwargInfo('subdir', (str, None)),
+ _INSTALL_MODE_KW.evolve(since='0.47.0'),
+ )
+ def func_install_headers(self, node: mparser.BaseNode,
+ args: T.Tuple[T.List['mesonlib.FileOrString']],
+ kwargs: 'kwargs.FuncInstallHeaders') -> build.Headers:
+ source_files = self.source_strings_to_files(args[0])
+ install_subdir = kwargs['subdir']
+ if install_subdir is not None and os.path.isabs(install_subdir):
+ mlog.deprecation('Subdir keyword must not be an absolute path. This will be a hard error in the next release.')
+
+ h = build.Headers(source_files, install_subdir, kwargs['install_dir'],
+ kwargs['install_mode'], self.subproject)
+ self.build.headers.append(h)
+
+ return h
+
+ @typed_pos_args('install_man', varargs=(str, mesonlib.File))
+ @typed_kwargs(
+ 'install_man',
+ KwargInfo('install_dir', (str, None)),
+ KwargInfo('locale', (str, None), since='0.58.0'),
+ _INSTALL_MODE_KW.evolve(since='0.47.0')
+ )
+ def func_install_man(self, node: mparser.BaseNode,
+ args: T.Tuple[T.List['mesonlib.FileOrString']],
+ kwargs: 'kwargs.FuncInstallMan') -> build.Man:
+        # We only need to narrow the type here: the inputs are limited to
+        # strings and Files, so source_strings_to_files() returns only Files.
+ sources = self.source_strings_to_files(args[0])
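+        # Man sources must end in a section number between 1 and 9, e.g.
+        # (illustrative):
+        #     install_man('foo.1', 'bar.3')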
+ for s in sources:
+ try:
+ num = int(s.rsplit('.', 1)[-1])
+ except (IndexError, ValueError):
+ num = 0
+ if not 1 <= num <= 9:
+ raise InvalidArguments('Man file must have a file extension of a number between 1 and 9')
+
+ m = build.Man(sources, kwargs['install_dir'], kwargs['install_mode'],
+ self.subproject, kwargs['locale'])
+ self.build.man.append(m)
+
+ return m
+
+ @FeatureNewKwargs('subdir', '0.44.0', ['if_found'])
+ @permittedKwargs({'if_found'})
+ def func_subdir(self, node, args, kwargs):
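+        # Illustrative meson.build usage; `gtest_dep` is a hypothetical
+        # dependency object:
+        #     subdir('src')
+        #     subdir('tests', if_found: gtest_dep)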
+ self.validate_arguments(args, 1, [str])
+ mesonlib.check_direntry_issues(args)
+ if '..' in args[0]:
+ raise InvalidArguments('Subdir contains ..')
+ if self.subdir == '' and args[0] == self.subproject_dir:
+ raise InvalidArguments('Must not go into subprojects dir with subdir(), use subproject() instead.')
+ if self.subdir == '' and args[0].startswith('meson-'):
+ raise InvalidArguments('The "meson-" prefix is reserved and cannot be used for top-level subdir().')
+ for i in mesonlib.extract_as_list(kwargs, 'if_found'):
+ if not hasattr(i, 'found'):
+ raise InterpreterException('Object used in if_found does not have a found method.')
+ if not i.found():
+ return
+ prev_subdir = self.subdir
+ subdir = os.path.join(prev_subdir, args[0])
+ if os.path.isabs(subdir):
+ raise InvalidArguments('Subdir argument must be a relative path.')
+ absdir = os.path.join(self.environment.get_source_dir(), subdir)
+ symlinkless_dir = os.path.realpath(absdir)
+ build_file = os.path.join(symlinkless_dir, 'meson.build')
+ if build_file in self.processed_buildfiles:
+ raise InvalidArguments('Tried to enter directory "%s", which has already been visited.'
+ % subdir)
+ self.processed_buildfiles.add(build_file)
+ self.subdir = subdir
+ os.makedirs(os.path.join(self.environment.build_dir, subdir), exist_ok=True)
+ buildfilename = os.path.join(self.subdir, environment.build_filename)
+ self.build_def_files.append(buildfilename)
+ absname = os.path.join(self.environment.get_source_dir(), buildfilename)
+ if not os.path.isfile(absname):
+ self.subdir = prev_subdir
+ raise InterpreterException(f"Non-existent build file '{buildfilename!s}'")
+ with open(absname, encoding='utf-8') as f:
+ code = f.read()
+        assert isinstance(code, str)
+ try:
+ codeblock = mparser.Parser(code, absname).parse()
+ except mesonlib.MesonException as me:
+ me.file = absname
+ raise me
+ try:
+ self.evaluate_codeblock(codeblock)
+ except SubdirDoneRequest:
+ pass
+ self.subdir = prev_subdir
+
+ def _get_kwarg_install_mode(self, kwargs: T.Dict[str, T.Any]) -> T.Optional[FileMode]:
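+        # install_mode is up to three items: a permissions string (or false to
+        # keep the default), then optional owner and group, e.g. (illustrative):
+        #     install_mode: ['rw-r--r--', 'root', 'root']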
+ if kwargs.get('install_mode', None) is None:
+ return None
+        install_mode: T.List[T.Union[str, int, None]] = []
+ mode = mesonlib.typeslistify(kwargs.get('install_mode', []), (str, int))
+ for m in mode:
+            # Treat any argument set to `false` as unset by mapping it to None
+ if m is False:
+ m = None
+ install_mode.append(m)
+ if len(install_mode) > 3:
+ raise InvalidArguments('Keyword argument install_mode takes at '
+ 'most 3 arguments.')
+ if len(install_mode) > 0 and install_mode[0] is not None and \
+ not isinstance(install_mode[0], str):
+ raise InvalidArguments('Keyword argument install_mode requires the '
+ 'permissions arg to be a string or false')
+ return FileMode(*install_mode)
+
+ @typed_pos_args('install_data', varargs=(str, mesonlib.File))
+ @typed_kwargs(
+ 'install_data',
+ KwargInfo('install_dir', str),
+ KwargInfo('sources', ContainerTypeInfo(list, (str, mesonlib.File)), listify=True, default=[]),
+ KwargInfo('rename', ContainerTypeInfo(list, str), default=[], listify=True, since='0.46.0'),
+ _INSTALL_MODE_KW.evolve(since='0.38.0'),
+ )
+ def func_install_data(self, node: mparser.BaseNode,
+ args: T.Tuple[T.List['mesonlib.FileOrString']],
+ kwargs: 'kwargs.FuncInstallData') -> build.Data:
+ sources = self.source_strings_to_files(args[0] + kwargs['sources'])
+ rename = kwargs['rename'] or None
+ if rename:
+ if len(rename) != len(sources):
+ raise InvalidArguments(
+ '"rename" and "sources" argument lists must be the same length if "rename" is given. '
+ f'Rename has {len(rename)} elements and sources has {len(sources)}.')
+
+ data = build.Data(
+ sources, kwargs['install_dir'], kwargs['install_mode'],
+ self.subproject, rename)
+ self.build.data.append(data)
+ return data
+
+ @typed_pos_args('install_subdir', str)
+ @typed_kwargs(
+ 'install_subdir',
+ KwargInfo('install_dir', str, required=True),
+ KwargInfo('strip_directory', bool, default=False),
+ KwargInfo('exclude_files', ContainerTypeInfo(list, str),
+ default=[], listify=True, since='0.42.0',
+ validator=lambda x: 'cannot be absolute' if any(os.path.isabs(d) for d in x) else None),
+ KwargInfo('exclude_directories', ContainerTypeInfo(list, str),
+ default=[], listify=True, since='0.42.0',
+ validator=lambda x: 'cannot be absolute' if any(os.path.isabs(d) for d in x) else None),
+ _INSTALL_MODE_KW.evolve(since='0.38.0'),
+ )
+ def func_install_subdir(self, node: mparser.BaseNode, args: T.Tuple[str],
+ kwargs: 'kwargs.FuncInstallSubdir') -> build.InstallDir:
+ exclude = (set(kwargs['exclude_files']), set(kwargs['exclude_directories']))
+ idir = build.InstallDir(
+ self.subdir,
+ args[0],
+ kwargs['install_dir'],
+ kwargs['install_mode'],
+ exclude,
+ kwargs['strip_directory'],
+ self.subproject)
+ self.build.install_dirs.append(idir)
+ return idir
+
+ @FeatureNewKwargs('configure_file', '0.47.0', ['copy', 'output_format', 'install_mode', 'encoding'])
+ @FeatureNewKwargs('configure_file', '0.46.0', ['format'])
+ @FeatureNewKwargs('configure_file', '0.41.0', ['capture'])
+ @FeatureNewKwargs('configure_file', '0.50.0', ['install'])
+ @FeatureNewKwargs('configure_file', '0.52.0', ['depfile'])
+ @permittedKwargs({'input', 'output', 'configuration', 'command', 'copy', 'depfile',
+ 'install_dir', 'install_mode', 'capture', 'install', 'format',
+ 'output_format', 'encoding'})
+ @noPosargs
+ def func_configure_file(self, node, args, kwargs):
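+        # The three mutually exclusive modes, as meson.build sketches
+        # (illustrative; `conf` and `prog` are hypothetical objects):
+        #     configure_file(input: 'config.h.in', output: 'config.h',
+        #                    configuration: conf)
+        #     configure_file(output: 'out.txt', command: [prog, '@OUTPUT@'])
+        #     configure_file(input: 'a.txt', output: 'a.txt', copy: true)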
+ if 'output' not in kwargs:
+ raise InterpreterException('Required keyword argument "output" not defined.')
+ actions = {'configuration', 'command', 'copy'}.intersection(kwargs.keys())
+ if len(actions) == 0:
+ raise InterpreterException('Must specify an action with one of these '
+ 'keyword arguments: \'configuration\', '
+ '\'command\', or \'copy\'.')
+ elif len(actions) == 2:
+ raise InterpreterException('Must not specify both {!r} and {!r} '
+ 'keyword arguments since they are '
+ 'mutually exclusive.'.format(*actions))
+ elif len(actions) == 3:
+            raise InterpreterException('Must specify only one of {!r}, {!r}, and '
+                                       '{!r} keyword arguments since they are '
+                                       'mutually exclusive.'.format(*actions))
+ if 'capture' in kwargs:
+ if not isinstance(kwargs['capture'], bool):
+ raise InterpreterException('"capture" keyword must be a boolean.')
+ if 'command' not in kwargs:
+ raise InterpreterException('"capture" keyword requires "command" keyword.')
+
+ if 'format' in kwargs:
+ fmt = kwargs['format']
+ if not isinstance(fmt, str):
+ raise InterpreterException('"format" keyword must be a string.')
+ else:
+ fmt = 'meson'
+
+ if fmt not in ('meson', 'cmake', 'cmake@'):
+ raise InterpreterException('"format" possible values are "meson", "cmake" or "cmake@".')
+
+ if 'output_format' in kwargs:
+ output_format = kwargs['output_format']
+ if not isinstance(output_format, str):
+ raise InterpreterException('"output_format" keyword must be a string.')
+ else:
+ output_format = 'c'
+
+ if output_format not in ('c', 'nasm'):
+ raise InterpreterException('"format" possible values are "c" or "nasm".')
+
+ if 'depfile' in kwargs:
+ depfile = kwargs['depfile']
+ if not isinstance(depfile, str):
+ raise InterpreterException('depfile file name must be a string')
+ else:
+ depfile = None
+
+ # Validate input
+ inputs = self.source_strings_to_files(extract_as_list(kwargs, 'input'))
+ inputs_abs = []
+ for f in inputs:
+ if isinstance(f, mesonlib.File):
+ inputs_abs.append(f.absolute_path(self.environment.source_dir,
+ self.environment.build_dir))
+ self.add_build_def_file(f)
+ else:
+ raise InterpreterException('Inputs can only be strings or file objects')
+ # Validate output
+ output = kwargs['output']
+ if not isinstance(output, str):
+ raise InterpreterException('Output file name must be a string')
+ if inputs_abs:
+ values = mesonlib.get_filenames_templates_dict(inputs_abs, None)
+ outputs = mesonlib.substitute_values([output], values)
+ output = outputs[0]
+ if depfile:
+ depfile = mesonlib.substitute_values([depfile], values)[0]
+ ofile_rpath = os.path.join(self.subdir, output)
+ if ofile_rpath in self.configure_file_outputs:
+ mesonbuildfile = os.path.join(self.subdir, 'meson.build')
+ current_call = f"{mesonbuildfile}:{self.current_lineno}"
+ first_call = "{}:{}".format(mesonbuildfile, self.configure_file_outputs[ofile_rpath])
+ mlog.warning('Output file', mlog.bold(ofile_rpath, True), 'for configure_file() at', current_call, 'overwrites configure_file() output at', first_call)
+ else:
+ self.configure_file_outputs[ofile_rpath] = self.current_lineno
+ if os.path.dirname(output) != '':
+ raise InterpreterException('Output file name must not contain a subdirectory.')
+ (ofile_path, ofile_fname) = os.path.split(os.path.join(self.subdir, output))
+ ofile_abs = os.path.join(self.environment.build_dir, ofile_path, ofile_fname)
+ # Perform the appropriate action
+ if 'configuration' in kwargs:
+ conf = kwargs['configuration']
+ if isinstance(conf, dict):
+ FeatureNew.single_use('configure_file.configuration dictionary', '0.49.0', self.subproject)
+ conf = ConfigurationDataObject(self.subproject, conf)
+ elif not isinstance(conf, ConfigurationDataObject):
+ raise InterpreterException('Argument "configuration" is not of type configuration_data')
+ mlog.log('Configuring', mlog.bold(output), 'using configuration')
+ if len(inputs) > 1:
+                raise InterpreterException('At most one input file can be given in configuration mode')
+ if inputs:
+ os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
+ file_encoding = kwargs.setdefault('encoding', 'utf-8')
+ missing_variables, confdata_useless = \
+ mesonlib.do_conf_file(inputs_abs[0], ofile_abs, conf.conf_data,
+ fmt, file_encoding)
+ if missing_variables:
+ var_list = ", ".join(map(repr, sorted(missing_variables)))
+ mlog.warning(
+ "The variable(s) %s in the input file '%s' are not "
+ "present in the given configuration data." % (
+ var_list, inputs[0]), location=node)
+ if confdata_useless:
+ ifbase = os.path.basename(inputs_abs[0])
+ mlog.warning('Got an empty configuration_data() object and found no '
+ f'substitutions in the input file {ifbase!r}. If you want to '
+ 'copy a file to the build dir, use the \'copy:\' keyword '
+ 'argument added in 0.47.0', location=node)
+ else:
+ mesonlib.dump_conf_header(ofile_abs, conf.conf_data, output_format)
+ conf.mark_used()
+ elif 'command' in kwargs:
+ if len(inputs) > 1:
+ FeatureNew.single_use('multiple inputs in configure_file()', '0.52.0', self.subproject)
+ # We use absolute paths for input and output here because the cwd
+ # that the command is run from is 'unspecified', so it could change.
+ # Currently it's builddir/subdir for in_builddir else srcdir/subdir.
+ values = mesonlib.get_filenames_templates_dict(inputs_abs, [ofile_abs])
+ if depfile:
+ depfile = os.path.join(self.environment.get_scratch_dir(), depfile)
+ values['@DEPFILE@'] = depfile
+ # Substitute @INPUT@, @OUTPUT@, etc here.
+ cmd = mesonlib.substitute_values(kwargs['command'], values)
+ mlog.log('Configuring', mlog.bold(output), 'with command')
+ res = self.run_command_impl(node, cmd, {}, True)
+ if res.returncode != 0:
+ raise InterpreterException('Running configure command failed.\n%s\n%s' %
+ (res.stdout, res.stderr))
+ if 'capture' in kwargs and kwargs['capture']:
+ dst_tmp = ofile_abs + '~'
+ file_encoding = kwargs.setdefault('encoding', 'utf-8')
+ with open(dst_tmp, 'w', encoding=file_encoding) as f:
+ f.writelines(res.stdout)
+ if inputs_abs:
+ shutil.copymode(inputs_abs[0], dst_tmp)
+ mesonlib.replace_if_different(ofile_abs, dst_tmp)
+ if depfile:
+ mlog.log('Reading depfile:', mlog.bold(depfile))
+ with open(depfile, encoding='utf-8') as f:
+ df = DepFile(f.readlines())
+ deps = df.get_all_dependencies(ofile_fname)
+ for dep in deps:
+ self.add_build_def_file(dep)
+
+ elif 'copy' in kwargs:
+ if len(inputs_abs) != 1:
+ raise InterpreterException('Exactly one input file must be given in copy mode')
+ os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
+ shutil.copy2(inputs_abs[0], ofile_abs)
+ else:
+ # Not reachable
+ raise AssertionError
+ # Install file if requested, we check for the empty string
+ # for backwards compatibility. That was the behaviour before
+ # 0.45.0 so preserve it.
+ idir = kwargs.get('install_dir', '')
+ if idir is False:
+ idir = ''
+ mlog.deprecation('Please use the new `install:` kwarg instead of passing '
+ '`false` to `install_dir:`', location=node)
+ if not isinstance(idir, str):
+ if isinstance(idir, list) and len(idir) == 0:
+ mlog.deprecation('install_dir: kwarg must be a string and not an empty array. '
+ 'Please use the install: kwarg to enable or disable installation. '
+ 'This will be a hard error in the next release.')
+ else:
+ raise InterpreterException('"install_dir" must be a string')
+ install = kwargs.get('install', idir != '')
+ if not isinstance(install, bool):
+ raise InterpreterException('"install" must be a boolean')
+ if install:
+ if not idir:
+ raise InterpreterException('"install_dir" must be specified '
+ 'when "install" in a configure_file '
+ 'is true')
+ cfile = mesonlib.File.from_built_file(ofile_path, ofile_fname)
+ install_mode = self._get_kwarg_install_mode(kwargs)
+ self.build.data.append(build.Data([cfile], idir, install_mode, self.subproject))
+ return mesonlib.File.from_built_file(self.subdir, output)
+
+ def extract_incdirs(self, kwargs):
+ prospectives = extract_as_list(kwargs, 'include_directories')
+ result = []
+ for p in prospectives:
+ if isinstance(p, build.IncludeDirs):
+ result.append(p)
+ elif isinstance(p, str):
+ result.append(self.build_incdir_object([p]))
+ else:
+ raise InterpreterException('Include directory objects can only be created from strings or include directories.')
+ return result
+
+ @permittedKwargs({'is_system'})
+ @stringArgs
+ def func_include_directories(self, node, args, kwargs):
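+        # Illustrative meson.build usage:
+        #     inc = include_directories('include', is_system: false)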
+ return self.build_incdir_object(args, kwargs.get('is_system', False))
+
+ def build_incdir_object(self, incdir_strings: T.List[str], is_system: bool = False) -> build.IncludeDirs:
+ if not isinstance(is_system, bool):
+ raise InvalidArguments('Is_system must be boolean.')
+ src_root = self.environment.get_source_dir()
+ build_root = self.environment.get_build_dir()
+ absbase_src = os.path.join(src_root, self.subdir)
+ absbase_build = os.path.join(build_root, self.subdir)
+
+ for a in incdir_strings:
+ if a.startswith(src_root):
+ raise InvalidArguments(textwrap.dedent('''\
+ Tried to form an absolute path to a source dir.
+ You should not do that but use relative paths instead.
+
+ To get include path to any directory relative to the current dir do
+
+ incdir = include_directories(dirname)
+
+ After this incdir will contain both the current source dir as well as the
+ corresponding build dir. It can then be used in any subdirectory and
+ Meson will take care of all the busywork to make paths work.
+
+ Dirname can even be '.' to mark the current directory. Though you should
+ remember that the current source and build directories are always
+ put in the include directories by default so you only need to do
+ include_directories('.') if you intend to use the result in a
+ different subdirectory.
+ '''))
+ else:
+ try:
+ self.validate_within_subproject(self.subdir, a)
+ except InterpreterException:
+ mlog.warning('include_directories sandbox violation!')
+ print(textwrap.dedent(f'''\
+ The project is trying to access the directory {a} which belongs to a different
+                    subproject. This is a problem as it hardcodes the relative paths of these two projects.
+ This makes it impossible to compile the project in any other directory layout and also
+ prevents the subproject from changing its own directory layout.
+
+ Instead of poking directly at the internals the subproject should be executed and
+ it should set a variable that the caller can then use. Something like:
+
+ # In subproject
+                    some_dep = declare_dependency(include_directories: include_directories('include'))
+
+ # In parent project
+                    some_dep = dependency('some')
+ executable(..., dependencies: [some_dep])
+
+ This warning will become a hard error in a future Meson release.
+ '''))
+ absdir_src = os.path.join(absbase_src, a)
+ absdir_build = os.path.join(absbase_build, a)
+ if not os.path.isdir(absdir_src) and not os.path.isdir(absdir_build):
+ raise InvalidArguments('Include dir %s does not exist.' % a)
+ i = build.IncludeDirs(self.subdir, incdir_strings, is_system)
+ return i
+
+ @permittedKwargs({'exe_wrapper', 'gdb', 'timeout_multiplier', 'env', 'is_default',
+ 'exclude_suites'})
+ @stringArgs
+ def func_add_test_setup(self, node, args, kwargs):
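+        # Illustrative meson.build usage:
+        #     add_test_setup('valgrind',
+        #                    exe_wrapper: ['valgrind', '--error-exitcode=1'],
+        #                    timeout_multiplier: 3)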
+ if len(args) != 1:
+ raise InterpreterException('Add_test_setup needs one argument for the setup name.')
+ setup_name = args[0]
+ if re.fullmatch('([_a-zA-Z][_0-9a-zA-Z]*:)?[_a-zA-Z][_0-9a-zA-Z]*', setup_name) is None:
+            raise InterpreterException('Setup name may only contain alphanumeric characters and underscores, optionally prefixed by "projectname:".')
+ if ":" not in setup_name:
+ setup_name = (self.subproject if self.subproject else self.build.project_name) + ":" + setup_name
+ try:
+ inp = extract_as_list(kwargs, 'exe_wrapper')
+ exe_wrapper = []
+ for i in inp:
+ if isinstance(i, str):
+ exe_wrapper.append(i)
+ elif isinstance(i, ExternalProgram):
+ if not i.found():
+ raise InterpreterException('Tried to use non-found executable.')
+ exe_wrapper += i.get_command()
+ else:
+ raise InterpreterException('Exe wrapper can only contain strings or external binaries.')
+ except KeyError:
+ exe_wrapper = None
+ gdb = kwargs.get('gdb', False)
+ if not isinstance(gdb, bool):
+ raise InterpreterException('Gdb option must be a boolean')
+ timeout_multiplier = kwargs.get('timeout_multiplier', 1)
+ if not isinstance(timeout_multiplier, int):
+ raise InterpreterException('Timeout multiplier must be a number.')
+ if timeout_multiplier <= 0:
+ FeatureNew('add_test_setup() timeout_multiplier <= 0', '0.57.0').use(self.subproject)
+ is_default = kwargs.get('is_default', False)
+ if not isinstance(is_default, bool):
+ raise InterpreterException('is_default option must be a boolean')
+ if is_default:
+ if self.build.test_setup_default_name is not None:
+ raise InterpreterException('\'%s\' is already set as default. '
+ 'is_default can be set to true only once' % self.build.test_setup_default_name)
+ self.build.test_setup_default_name = setup_name
+ exclude_suites = mesonlib.stringlistify(kwargs.get('exclude_suites', []))
+ env = self.unpack_env_kwarg(kwargs)
+ self.build.test_setups[setup_name] = build.TestSetup(exe_wrapper, gdb, timeout_multiplier, env,
+ exclude_suites)
+
+ @typed_pos_args('add_global_arguments', varargs=str)
+ @typed_kwargs('add_global_arguments', _NATIVE_KW, _LANGUAGE_KW)
+ def func_add_global_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+ self._add_global_arguments(node, self.build.global_args[kwargs['native']], args[0], kwargs)
+
+ @typed_pos_args('add_global_link_arguments', varargs=str)
+    @typed_kwargs('add_global_link_arguments', _NATIVE_KW, _LANGUAGE_KW)
+ def func_add_global_link_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+ self._add_global_arguments(node, self.build.global_link_args[kwargs['native']], args[0], kwargs)
+
+ @typed_pos_args('add_project_arguments', varargs=str)
+ @typed_kwargs('add_project_arguments', _NATIVE_KW, _LANGUAGE_KW)
+ def func_add_project_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+ self._add_project_arguments(node, self.build.projects_args[kwargs['native']], args[0], kwargs)
+
+ @typed_pos_args('add_project_link_arguments', varargs=str)
+    @typed_kwargs('add_project_link_arguments', _NATIVE_KW, _LANGUAGE_KW)
+ def func_add_project_link_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+ self._add_project_arguments(node, self.build.projects_link_args[kwargs['native']], args[0], kwargs)
+
+ def _warn_about_builtin_args(self, args: T.List[str]) -> None:
+ # -Wpedantic is deliberately not included, since some people want to use it but not use -Wextra
+ # see e.g.
+ # https://github.com/mesonbuild/meson/issues/3275#issuecomment-641354956
+ # https://github.com/mesonbuild/meson/issues/3742
+ warnargs = ('/W1', '/W2', '/W3', '/W4', '/Wall', '-Wall', '-Wextra')
+ optargs = ('-O0', '-O2', '-O3', '-Os', '/O1', '/O2', '/Os')
+ for arg in args:
+ if arg in warnargs:
+ mlog.warning(f'Consider using the built-in warning_level option instead of using "{arg}".',
+ location=self.current_node)
+ elif arg in optargs:
+ mlog.warning(f'Consider using the built-in optimization level instead of using "{arg}".',
+ location=self.current_node)
+ elif arg == '-Werror':
+ mlog.warning(f'Consider using the built-in werror option instead of using "{arg}".',
+ location=self.current_node)
+ elif arg == '-g':
+ mlog.warning(f'Consider using the built-in debug option instead of using "{arg}".',
+ location=self.current_node)
+ elif arg.startswith('-fsanitize'):
+ mlog.warning(f'Consider using the built-in option for sanitizers instead of using "{arg}".',
+ location=self.current_node)
+ elif arg.startswith('-std=') or arg.startswith('/std:'):
+ mlog.warning(f'Consider using the built-in option for language standard version instead of using "{arg}".',
+ location=self.current_node)
+
+ def _add_global_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.List[str]],
+ args: T.List[str], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+ if self.is_subproject():
+ msg = f'Function \'{node.func_name}\' cannot be used in subprojects because ' \
+ 'there is no way to make that reliable.\nPlease only call ' \
+ 'this if is_subproject() returns false. Alternatively, ' \
+ 'define a variable that\ncontains your language-specific ' \
+ 'arguments and add it to the appropriate *_args kwarg ' \
+ 'in each target.'
+ raise InvalidCode(msg)
+ frozen = self.project_args_frozen or self.global_args_frozen
+ self._add_arguments(node, argsdict, frozen, args, kwargs)
+
+ def _add_project_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.Dict[str, T.List[str]]],
+ args: T.List[str], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+ if self.subproject not in argsdict:
+ argsdict[self.subproject] = {}
+ self._add_arguments(node, argsdict[self.subproject],
+ self.project_args_frozen, args, kwargs)
+
+ def _add_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.List[str]],
+ args_frozen: bool, args: T.List[str], kwargs: 'kwargs.FuncAddProjectArgs') -> None:
+ if args_frozen:
+ msg = f'Tried to use \'{node.func_name}\' after a build target has been declared.\n' \
+ 'This is not permitted. Please declare all arguments before your targets.'
+ raise InvalidCode(msg)
+
+ self._warn_about_builtin_args(args)
+
+ for lang in kwargs['language']:
+ argsdict[lang] = argsdict.get(lang, []) + args
+
+ @noKwargs
+ @noArgsFlattening
+ def func_environment(self, node, args, kwargs):
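+        # Illustrative meson.build usage:
+        #     env = environment({'FOO': 'bar'})   # dict form, 0.52.0+
+        #     env.append('PATH', '/opt/bin', separator: ':')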
+ if len(args) > 1:
+            raise InterpreterException('environment takes only one optional positional argument')
+ elif len(args) == 1:
+ FeatureNew.single_use('environment positional arguments', '0.52.0', self.subproject)
+ initial_values = args[0]
+            if not isinstance(initial_values, (dict, list)):
+ raise InterpreterException('environment first argument must be a dictionary or a list')
+ else:
+ initial_values = {}
+ return EnvironmentVariablesObject(initial_values, self.subproject)
+
+ @stringArgs
+ @noKwargs
+ def func_join_paths(self, node, args, kwargs):
+ return self.join_path_strings(args)
+
+ def run(self) -> None:
+ super().run()
+ mlog.log('Build targets in project:', mlog.bold(str(len(self.build.targets))))
+ FeatureNew.report(self.subproject)
+ FeatureDeprecated.report(self.subproject)
+ if not self.is_subproject():
+ self.print_extra_warnings()
+ if self.subproject == '':
+ self._print_summary()
+
+ def print_extra_warnings(self) -> None:
+ # TODO cross compilation
+ for c in self.coredata.compilers.host.values():
+ if c.get_id() == 'clang':
+ self.check_clang_asan_lundef()
+ break
+
+ def check_clang_asan_lundef(self) -> None:
+ if OptionKey('b_lundef') not in self.coredata.options:
+ return
+ if OptionKey('b_sanitize') not in self.coredata.options:
+ return
+ if (self.coredata.options[OptionKey('b_lundef')].value and
+ self.coredata.options[OptionKey('b_sanitize')].value != 'none'):
+ mlog.warning('''Trying to use {} sanitizer on Clang with b_lundef.
+This will probably not work.
+Try setting b_lundef to false instead.'''.format(self.coredata.options[OptionKey('b_sanitize')].value),
+ location=self.current_node)
+
+ # Check that the indicated file is within the same subproject
+ # as we currently are. This is to stop people doing
+ # nasty things like:
+ #
+ # f = files('../../master_src/file.c')
+ #
+ # Note that this is validated only when the file
+ # object is generated. The result can be used in a different
+ # subproject than it is defined in (due to e.g. a
+ # declare_dependency).
+ def validate_within_subproject(self, subdir, fname):
+ srcdir = Path(self.environment.source_dir)
+ norm = Path(srcdir, subdir, fname).resolve()
+ if os.path.isdir(norm):
+ inputtype = 'directory'
+ else:
+ inputtype = 'file'
+ if srcdir not in norm.parents:
+ # Grabbing files outside the source tree is ok.
+ # This is for vendor stuff like:
+ #
+ # /opt/vendorsdk/src/file_with_license_restrictions.c
+ return
+ project_root = Path(srcdir, self.root_subdir)
+ if norm == project_root:
+ return
+ if project_root not in norm.parents:
+ raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {norm.name} outside current (sub)project.')
+ if project_root / self.subproject_dir in norm.parents:
+ raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {norm.name} from a nested subproject.')
+
+
+    @T.overload
+    def source_strings_to_files(self, sources: T.List['mesonlib.FileOrString']) -> T.List['mesonlib.File']: ...
+
+    @T.overload
+    def source_strings_to_files(self, sources: T.List['SourceInputs']) -> T.List['SourceOutputs']: ...
+
+ def source_strings_to_files(self, sources: T.List['SourceInputs']) -> T.List['SourceOutputs']:
+ """Lower inputs to a list of Targets and Files, replacing any strings.
+
+ :param sources: A raw (Meson DSL) list of inputs (targets, files, and
+ strings)
+ :raises InterpreterException: if any of the inputs are of an invalid type
+ :return: A list of Targets and Files
+ """
+ mesonlib.check_direntry_issues(sources)
+ if not isinstance(sources, list):
+ sources = [sources]
+ results: T.List['SourceOutputs'] = []
+ for s in sources:
+ if isinstance(s, str):
+ self.validate_within_subproject(self.subdir, s)
+ results.append(mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, s))
+ elif isinstance(s, mesonlib.File):
+ results.append(s)
+            elif isinstance(s, (build.GeneratedList, build.BuildTarget,
+                                build.CustomTargetIndex, build.CustomTarget)):
+                results.append(s)
+ else:
+ raise InterpreterException(f'Source item is {s!r} instead of '
+ 'string or File-type object')
+ return results
+
+ def add_target(self, name, tobj):
+ if name == '':
+ raise InterpreterException('Target name must not be empty.')
+ if name.strip() == '':
+ raise InterpreterException('Target name must not consist only of whitespace.')
+ if name.startswith('meson-'):
+ raise InvalidArguments("Target names starting with 'meson-' are reserved "
+ "for Meson's internal use. Please rename.")
+ if name in coredata.FORBIDDEN_TARGET_NAMES:
+ raise InvalidArguments("Target name '%s' is reserved for Meson's "
+ "internal use. Please rename." % name)
+ # To permit an executable and a shared library to have the
+ # same name, such as "foo.exe" and "libfoo.a".
+ idname = tobj.get_id()
+ if idname in self.build.targets:
+ raise InvalidCode('Tried to create target "%s", but a target of that name already exists.' % name)
+ self.build.targets[idname] = tobj
+ if idname not in self.coredata.target_guids:
+ self.coredata.target_guids[idname] = str(uuid.uuid4()).upper()
+
+ @FeatureNew('both_libraries', '0.46.0')
+ def build_both_libraries(self, node, args, kwargs):
+ shared_lib = self.build_target(node, args, kwargs, build.SharedLibrary)
+
+ # Check if user forces non-PIC static library.
+ pic = True
+ key = OptionKey('b_staticpic')
+ if 'pic' in kwargs:
+ pic = kwargs['pic']
+ elif key in self.environment.coredata.options:
+ pic = self.environment.coredata.options[key].value
+
+ if self.backend.name == 'xcode':
+ # Xcode is a bit special in that you can't (at least for the moment)
+ # form a library only from object file inputs. The simple but inefficient
+ # solution is to use the sources directly. This will lead to them being
+ # built twice. This is unfortunate and slow, but at least it works.
+ # Feel free to submit patches to get this fixed if it is an
+ # issue for you.
+ reuse_object_files = False
+ else:
+ reuse_object_files = pic
+
+ if reuse_object_files:
+ # Exclude sources from args and kwargs to avoid building them twice
+ static_args = [args[0]]
+ static_kwargs = kwargs.copy()
+ static_kwargs['sources'] = []
+ static_kwargs['objects'] = shared_lib.extract_all_objects()
+ else:
+ static_args = args
+ static_kwargs = kwargs
+
+ static_lib = self.build_target(node, static_args, static_kwargs, build.StaticLibrary)
+
+ return build.BothLibraries(shared_lib, static_lib)
+
+ def build_library(self, node, args, kwargs):
+ default_library = self.coredata.get_option(OptionKey('default_library', subproject=self.subproject))
+ if default_library == 'shared':
+ return self.build_target(node, args, kwargs, build.SharedLibrary)
+ elif default_library == 'static':
+ return self.build_target(node, args, kwargs, build.StaticLibrary)
+ elif default_library == 'both':
+ return self.build_both_libraries(node, args, kwargs)
+ else:
+            raise InterpreterException('Unknown default_library value: %s.' % default_library)
+
+ def build_target(self, node, args, kwargs, targetclass):
+ @FeatureNewKwargs('build target', '0.42.0', ['rust_crate_type', 'build_rpath', 'implicit_include_directories'])
+ @FeatureNewKwargs('build target', '0.41.0', ['rust_args'])
+ @FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
+ @FeatureNewKwargs('build target', '0.48.0', ['gnu_symbol_visibility'])
+ def build_target_decorator_caller(self, node, args, kwargs):
+ return True
+
+ build_target_decorator_caller(self, node, args, kwargs)
+
+ if not args:
+ raise InterpreterException('Target does not have a name.')
+ name, *sources = args
+ for_machine = self.machine_from_native_kwarg(kwargs)
+ if 'sources' in kwargs:
+ sources += listify(kwargs['sources'])
+ sources = self.source_strings_to_files(sources)
+ objs = extract_as_list(kwargs, 'objects')
+ kwargs['dependencies'] = extract_as_list(kwargs, 'dependencies')
+ kwargs['install_mode'] = self._get_kwarg_install_mode(kwargs)
+ if 'extra_files' in kwargs:
+ ef = extract_as_list(kwargs, 'extra_files')
+ kwargs['extra_files'] = self.source_strings_to_files(ef)
+ self.check_sources_exist(os.path.join(self.source_root, self.subdir), sources)
+ if targetclass not in {build.Executable, build.SharedLibrary, build.SharedModule, build.StaticLibrary, build.Jar}:
+ mlog.debug('Unknown target type:', str(targetclass))
+ raise RuntimeError('Unreachable code')
+ self.kwarg_strings_to_includedirs(kwargs)
+
+ # Filter out kwargs from other target types. For example 'soversion'
+ # passed to library() when default_library == 'static'.
+ kwargs = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs}
+
+ kwargs['include_directories'] = self.extract_incdirs(kwargs)
+ target = targetclass(name, self.subdir, self.subproject, for_machine, sources, objs, self.environment, kwargs)
+ target.project_version = self.project_version
+
+ self.add_stdlib_info(target)
+ self.add_target(name, target)
+ self.project_args_frozen = True
+ return target
+
+ def kwarg_strings_to_includedirs(self, kwargs):
+ if 'd_import_dirs' in kwargs:
+ items = mesonlib.extract_as_list(kwargs, 'd_import_dirs')
+ cleaned_items = []
+ for i in items:
+ if isinstance(i, str):
+ # BW compatibility. This was permitted so we must support it
+ # for a few releases so people can transition to "correct"
+ # path declarations.
+ if os.path.normpath(i).startswith(self.environment.get_source_dir()):
+ mlog.warning('''Building a path to the source dir is not supported. Use a relative path instead.
+This will become a hard error in the future.''', location=self.current_node)
+ i = os.path.relpath(i, os.path.join(self.environment.get_source_dir(), self.subdir))
+ i = self.build_incdir_object([i])
+ cleaned_items.append(i)
+ kwargs['d_import_dirs'] = cleaned_items
+
+ def get_used_languages(self, target):
+ result = set()
+ for i in target.sources:
+ for lang, c in self.coredata.compilers[target.for_machine].items():
+ if c.can_compile(i):
+ result.add(lang)
+ break
+ return result
+
+ def add_stdlib_info(self, target):
+ for l in self.get_used_languages(target):
+ dep = self.build.stdlibs[target.for_machine].get(l, None)
+ if dep:
+ target.add_deps(dep)
+
+ def check_sources_exist(self, subdir, sources):
+ for s in sources:
+ if not isinstance(s, str):
+ continue # This means a generated source and they always exist.
+ fname = os.path.join(subdir, s)
+ if not os.path.isfile(fname):
+ raise InterpreterException('Tried to add non-existing source file %s.' % s)
+
+ # Only permit object extraction from the same subproject
+ def validate_extraction(self, buildtarget: mesonlib.HoldableObject) -> None:
+ if self.subproject != buildtarget.subproject:
+ raise InterpreterException('Tried to extract objects from a different subproject.')
+
+ def is_subproject(self):
+ return self.subproject != ''
+
+ @noKwargs
+ @noArgsFlattening
+ @noSecondLevelHolderResolving
+ def func_set_variable(self, node, args, kwargs):
+ if len(args) != 2:
+ raise InvalidCode('Set_variable takes two arguments.')
+ varname, value = args
+ self.set_variable(varname, value, holderify=True)
+
+ @noKwargs
+ @noArgsFlattening
+ @permissive_unholder_return
+ def func_get_variable(self, node, args, kwargs):
+ if len(args) < 1 or len(args) > 2:
+ raise InvalidCode('Get_variable takes one or two arguments.')
+ varname = args[0]
+ if isinstance(varname, Disabler):
+ return varname
+ if not isinstance(varname, str):
+ raise InterpreterException('First argument must be a string.')
+ try:
+ return self.variables[varname]
+ except KeyError:
+ pass
+ if len(args) == 2:
+ return args[1]
+ raise InterpreterException('Tried to get unknown variable "%s".' % varname)
+
+ @stringArgs
+ @noKwargs
+ def func_is_variable(self, node, args, kwargs):
+ if len(args) != 1:
+            raise InvalidCode('Is_variable takes one argument.')
+ varname = args[0]
+ return varname in self.variables
+
+ @staticmethod
+ def machine_from_native_kwarg(kwargs: T.Dict[str, T.Any]) -> MachineChoice:
+ native = kwargs.get('native', False)
+ if not isinstance(native, bool):
+ raise InvalidArguments('Argument to "native" must be a boolean.')
+ return MachineChoice.BUILD if native else MachineChoice.HOST
+
+ @FeatureNew('is_disabler', '0.52.0')
+ @noKwargs
+ def func_is_disabler(self, node, args, kwargs):
+ if len(args) != 1:
+ raise InvalidCode('Is_disabler takes one argument.')
+ varname = args[0]
+ return isinstance(varname, Disabler)
+
+ @noKwargs
+ @FeatureNew('range', '0.58.0')
+ @typed_pos_args('range', int, optargs=[int, int])
+ def func_range(self, node, args: T.Tuple[int, T.Optional[int], T.Optional[int]], kwargs: T.Dict[str, T.Any]) -> RangeHolder:
+ start, stop, step = args
+ # Just like Python's range, we allow range(stop), range(start, stop), or
+ # range(start, stop, step)
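+        # e.g. (illustrative): `foreach i : range(2, 10, 2)` iterates 2, 4, 6, 8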
+ if stop is None:
+ stop = start
+ start = 0
+ if step is None:
+ step = 1
+ # This is more strict than Python's range()
+ if start < 0:
+ raise InterpreterException('start cannot be negative')
+ if stop < start:
+ raise InterpreterException('stop cannot be less than start')
+ if step < 1:
+ raise InterpreterException('step must be >=1')
+ return RangeHolder(start, stop, step, subproject=self.subproject)
diff --git a/meson/mesonbuild/interpreter/interpreterobjects.py b/meson/mesonbuild/interpreter/interpreterobjects.py
new file mode 100644
index 000000000..5dc65d03f
--- /dev/null
+++ b/meson/mesonbuild/interpreter/interpreterobjects.py
@@ -0,0 +1,996 @@
+import os
+import shlex
+import subprocess
+import copy
+import textwrap
+
+from pathlib import Path, PurePath
+
+from .. import mesonlib
+from .. import coredata
+from .. import build
+from .. import mlog
+
+from ..modules import ModuleReturnValue, ModuleObject, ModuleState, ExtensionModule
+from ..backend.backends import TestProtocol
+from ..interpreterbase import (
+ ContainerTypeInfo, KwargInfo,
+ InterpreterObject, MesonInterpreterObject, ObjectHolder, MutableInterpreterObject,
+ FeatureCheckBase, FeatureNewKwargs, FeatureNew, FeatureDeprecated,
+ typed_pos_args, typed_kwargs, stringArgs, permittedKwargs,
+ noArgsFlattening, noPosargs, noKwargs, permissive_unholder_return, TYPE_var, TYPE_kwargs, TYPE_nvar, TYPE_nkwargs,
+ flatten, resolve_second_level_holders, InterpreterException, InvalidArguments, InvalidCode)
+from ..dependencies import Dependency, ExternalLibrary, InternalDependency
+from ..programs import ExternalProgram
+from ..mesonlib import HoldableObject, MesonException, OptionKey, listify, Popen_safe
+
+import typing as T
+
+if T.TYPE_CHECKING:
+ from . import kwargs
+ from .interpreter import Interpreter
+ from ..environment import Environment
+ from ..envconfig import MachineInfo
+
+
+def extract_required_kwarg(kwargs: 'kwargs.ExtractRequired',
+ subproject: str,
+ feature_check: T.Optional[FeatureCheckBase] = None,
+ default: bool = True) -> T.Tuple[bool, bool, T.Optional[str]]:
+ val = kwargs.get('required', default)
+ disabled = False
+ required = False
+ feature: T.Optional[str] = None
+ if isinstance(val, coredata.UserFeatureOption):
+ if not feature_check:
+ feature_check = FeatureNew('User option "feature"', '0.47.0')
+ feature_check.use(subproject)
+ feature = val.name
+ if val.is_disabled():
+ disabled = True
+ elif val.is_enabled():
+ required = True
+ elif isinstance(val, bool):
+ required = val
+ else:
+ raise InterpreterException('required keyword argument must be boolean or a feature option')
+
+ # Keep boolean value in kwargs to simplify other places where this kwarg is
+ # checked.
+ # TODO: this should be removed, and those callers should learn about FeatureOptions
+ kwargs['required'] = required
+
+ return disabled, required, feature
+
+def extract_search_dirs(kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+ search_dirs_str = mesonlib.stringlistify(kwargs.get('dirs', []))
+ search_dirs = [Path(d).expanduser() for d in search_dirs_str]
+ for d in search_dirs:
+ if mesonlib.is_windows() and d.root.startswith('\\'):
+            # A Unix path starting with '/' is not absolute on Windows; discard
+            # it without failing, for end-user ease of cross-platform directory arrays.
+ continue
+ if not d.is_absolute():
+ raise InvalidCode(f'Search directory {d} is not an absolute path.')
+ return list(map(str, search_dirs))
+
+class FeatureOptionHolder(ObjectHolder[coredata.UserFeatureOption]):
+ def __init__(self, option: coredata.UserFeatureOption, interpreter: 'Interpreter'):
+ super().__init__(option, interpreter)
+ if option and option.is_auto():
+            # TODO: we need to cast here because options is not a TypedDict
+ self.held_object = T.cast(coredata.UserFeatureOption, self.env.coredata.options[OptionKey('auto_features')])
+ self.held_object.name = option.name
+ self.methods.update({'enabled': self.enabled_method,
+ 'disabled': self.disabled_method,
+ 'allowed': self.allowed_method,
+ 'auto': self.auto_method,
+ 'require': self.require_method,
+ 'disable_auto_if': self.disable_auto_if_method,
+ })
+
+ @property
+ def value(self) -> str:
+ return 'disabled' if not self.held_object else self.held_object.value
+
+ def as_disabled(self) -> coredata.UserFeatureOption:
+ disabled = copy.deepcopy(self.held_object)
+ disabled.value = 'disabled'
+ return disabled
+
+ @noPosargs
+ @noKwargs
+ def enabled_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.value == 'enabled'
+
+ @noPosargs
+ @noKwargs
+ def disabled_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.value == 'disabled'
+
+ @noPosargs
+ @noKwargs
+ def allowed_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return not self.value == 'disabled'
+
+ @noPosargs
+ @noKwargs
+ def auto_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.value == 'auto'
+
+ @permittedKwargs({'error_message'})
+ def require_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> coredata.UserFeatureOption:
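+        # Illustrative meson.build usage (`doxygen` is a hypothetical program):
+        #     get_option('docs').require(doxygen.found(),
+        #         error_message: 'building docs requires doxygen')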
+ if len(args) != 1:
+ raise InvalidArguments('Expected 1 argument, got %d.' % (len(args), ))
+ if not isinstance(args[0], bool):
+ raise InvalidArguments('boolean argument expected.')
+ error_message = kwargs.pop('error_message', '')
+ if error_message and not isinstance(error_message, str):
+ raise InterpreterException("Error message must be a string.")
+ if args[0]:
+ return copy.deepcopy(self.held_object)
+
+ assert isinstance(error_message, str)
+ if self.value == 'enabled':
+ prefix = f'Feature {self.held_object.name} cannot be enabled'
+            if error_message:
+                prefix += ': '
+            raise InterpreterException(prefix + error_message)
+ return self.as_disabled()
+
+ @noKwargs
+ def disable_auto_if_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> coredata.UserFeatureOption:
+ if len(args) != 1:
+ raise InvalidArguments('Expected 1 argument, got %d.' % (len(args), ))
+ if not isinstance(args[0], bool):
+ raise InvalidArguments('boolean argument expected.')
+ return copy.deepcopy(self.held_object) if self.value != 'auto' or not args[0] else self.as_disabled()
+
+
+class RunProcess(MesonInterpreterObject):
+
+ def __init__(self,
+ cmd: ExternalProgram,
+ args: T.List[str],
+ env: build.EnvironmentVariables,
+ source_dir: str,
+ build_dir: str,
+ subdir: str,
+ mesonintrospect: T.List[str],
+ in_builddir: bool = False,
+ check: bool = False,
+ capture: bool = True) -> None:
+ super().__init__()
+ if not isinstance(cmd, ExternalProgram):
+ raise AssertionError('BUG: RunProcess must be passed an ExternalProgram')
+ self.capture = capture
+ self.returncode, self.stdout, self.stderr = self.run_command(cmd, args, env, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check)
+ self.methods.update({'returncode': self.returncode_method,
+ 'stdout': self.stdout_method,
+ 'stderr': self.stderr_method,
+ })
+
+ def run_command(self,
+ cmd: ExternalProgram,
+ args: T.List[str],
+ env: build.EnvironmentVariables,
+ source_dir: str,
+ build_dir: str,
+ subdir: str,
+ mesonintrospect: T.List[str],
+ in_builddir: bool,
+ check: bool = False) -> T.Tuple[int, str, str]:
+ command_array = cmd.get_command() + args
+ menv = {'MESON_SOURCE_ROOT': source_dir,
+ 'MESON_BUILD_ROOT': build_dir,
+ 'MESON_SUBDIR': subdir,
+ 'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in mesonintrospect]),
+ }
+ if in_builddir:
+ cwd = os.path.join(build_dir, subdir)
+ else:
+ cwd = os.path.join(source_dir, subdir)
+ child_env = os.environ.copy()
+ child_env.update(menv)
+ child_env = env.get_env(child_env)
+ stdout = subprocess.PIPE if self.capture else subprocess.DEVNULL
+ mlog.debug('Running command:', ' '.join(command_array))
+ try:
+ p, o, e = Popen_safe(command_array, stdout=stdout, env=child_env, cwd=cwd)
+ if self.capture:
+ mlog.debug('--- stdout ---')
+ mlog.debug(o)
+ else:
+ o = ''
+ mlog.debug('--- stdout disabled ---')
+ mlog.debug('--- stderr ---')
+ mlog.debug(e)
+ mlog.debug('')
+
+ if check and p.returncode != 0:
+ raise InterpreterException('Command "{}" failed with status {}.'.format(' '.join(command_array), p.returncode))
+
+ return p.returncode, o, e
+ except FileNotFoundError:
+ raise InterpreterException('Could not execute command "%s".' % ' '.join(command_array))
+
+ @noPosargs
+ @noKwargs
+ def returncode_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
+ return self.returncode
+
+ @noPosargs
+ @noKwargs
+ def stdout_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.stdout
+
+ @noPosargs
+ @noKwargs
+ def stderr_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.stderr
+
+# TODO: Parsing the initial values should be either done directly in the
+# `Interpreter` or in `build.EnvironmentVariables`. This way, this class
+# can be converted into a pure object holder.
+class EnvironmentVariablesObject(MutableInterpreterObject, MesonInterpreterObject):
+    # TODO: Move the type checking for initial_values out of this class and replace T.Any
+ def __init__(self, initial_values: T.Optional[T.Any] = None, subproject: str = ''):
+ super().__init__(subproject=subproject)
+ self.vars = build.EnvironmentVariables()
+ self.methods.update({'set': self.set_method,
+ 'append': self.append_method,
+ 'prepend': self.prepend_method,
+ })
+ if isinstance(initial_values, dict):
+ for k, v in initial_values.items():
+ self.set_method([k, v], {})
+ elif initial_values is not None:
+ for e in mesonlib.listify(initial_values):
+ if not isinstance(e, str):
+ raise InterpreterException('Env var definition must be a list of strings.')
+ if '=' not in e:
+                    raise InterpreterException("Env var definition must be of the form 'key=value'.")
+ (k, val) = e.split('=', 1)
+ k = k.strip()
+ val = val.strip()
+ if ' ' in k:
+ raise InterpreterException('Env var key must not have spaces in it.')
+ self.set_method([k, val], {})
+
+ def __repr__(self) -> str:
+ repr_str = "<{0}: {1}>"
+ return repr_str.format(self.__class__.__name__, self.vars.envvars)
+
+ def unpack_separator(self, kwargs: T.Dict[str, T.Any]) -> str:
+ separator = kwargs.get('separator', os.pathsep)
+ if not isinstance(separator, str):
+ raise InterpreterException("EnvironmentVariablesObject methods 'separator'"
+ " argument needs to be a string.")
+ return separator
+
+ def warn_if_has_name(self, name: str) -> None:
+        # Multiple append/prepend operations were not supported until 0.58.0.
+ if self.vars.has_name(name):
+ m = f'Overriding previous value of environment variable {name!r} with a new one'
+            FeatureNew(m, '0.58.0').use(self.subproject)
+
+ @stringArgs
+ @permittedKwargs({'separator'})
+ @typed_pos_args('environment.set', str, varargs=str, min_varargs=1)
+ def set_method(self, args: T.Tuple[str, T.List[str]], kwargs: T.Dict[str, T.Any]) -> None:
+ name, values = args
+ separator = self.unpack_separator(kwargs)
+ self.vars.set(name, values, separator)
+
+ @stringArgs
+ @permittedKwargs({'separator'})
+ @typed_pos_args('environment.append', str, varargs=str, min_varargs=1)
+ def append_method(self, args: T.Tuple[str, T.List[str]], kwargs: T.Dict[str, T.Any]) -> None:
+ name, values = args
+ separator = self.unpack_separator(kwargs)
+ self.warn_if_has_name(name)
+ self.vars.append(name, values, separator)
+
+ @stringArgs
+ @permittedKwargs({'separator'})
+ @typed_pos_args('environment.prepend', str, varargs=str, min_varargs=1)
+ def prepend_method(self, args: T.Tuple[str, T.List[str]], kwargs: T.Dict[str, T.Any]) -> None:
+ name, values = args
+ separator = self.unpack_separator(kwargs)
+ self.warn_if_has_name(name)
+ self.vars.prepend(name, values, separator)
+
+
+class ConfigurationDataObject(MutableInterpreterObject, MesonInterpreterObject):
+ def __init__(self, subproject: str, initial_values: T.Optional[T.Dict[str, T.Any]] = None) -> None:
+ self.used = False # These objects become immutable after use in configure_file.
+ super().__init__(subproject=subproject)
+ self.conf_data = build.ConfigurationData()
+ self.methods.update({'set': self.set_method,
+ 'set10': self.set10_method,
+ 'set_quoted': self.set_quoted_method,
+ 'has': self.has_method,
+ 'get': self.get_method,
+ 'keys': self.keys_method,
+ 'get_unquoted': self.get_unquoted_method,
+ 'merge_from': self.merge_from_method,
+ })
+ if isinstance(initial_values, dict):
+ for k, v in initial_values.items():
+ self.set_method([k, v], {})
+ elif initial_values:
+ raise AssertionError('Unsupported ConfigurationDataObject initial_values')
+
+ def is_used(self) -> bool:
+ return self.used
+
+ def mark_used(self) -> None:
+ self.used = True
+
+ def validate_args(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Tuple[str, T.Union[str, int, bool], T.Optional[str]]:
+ if len(args) == 1 and isinstance(args[0], list) and len(args[0]) == 2:
+ mlog.deprecation('Passing a list as the single argument to '
+ 'configuration_data.set is deprecated. This will '
+ 'become a hard error in the future.',
+ location=self.current_node)
+ args = args[0]
+
+ if len(args) != 2:
+ raise InterpreterException("Configuration set requires 2 arguments.")
+ if self.used:
+ raise InterpreterException("Can not set values on configuration object that has been used.")
+ name, val = args
+ if not isinstance(val, (int, str)):
+ msg = f'Setting a configuration data value to {val!r} is invalid, ' \
+ 'and will fail at configure_file(). If you are using it ' \
+ 'just to store some values, please use a dict instead.'
+ mlog.deprecation(msg, location=self.current_node)
+ desc = kwargs.get('description', None)
+ if not isinstance(name, str):
+ raise InterpreterException("First argument to set must be a string.")
+ if desc is not None and not isinstance(desc, str):
+ raise InterpreterException('Description must be a string.')
+
+ # TODO: Remove the cast once we get rid of the deprecation
+ return name, T.cast(T.Union[str, bool, int], val), desc
+
+ @noArgsFlattening
+ def set_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> None:
+ (name, val, desc) = self.validate_args(args, kwargs)
+ self.conf_data.values[name] = (val, desc)
+
+ def set_quoted_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> None:
+ (name, val, desc) = self.validate_args(args, kwargs)
+ if not isinstance(val, str):
+ raise InterpreterException("Second argument to set_quoted must be a string.")
+ escaped_val = '\\"'.join(val.split('"'))
+ self.conf_data.values[name] = ('"' + escaped_val + '"', desc)
+
+ def set10_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> None:
+ (name, val, desc) = self.validate_args(args, kwargs)
+ if val:
+ self.conf_data.values[name] = (1, desc)
+ else:
+ self.conf_data.values[name] = (0, desc)
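+
+ # Illustrative meson.build usage of the set family (editorial sketch):
+ #   conf = configuration_data()
+ #   conf.set('VERSION', '1.0')
+ #   conf.set_quoted('NAME', 'my "app"')  # stored as "my \"app\""
+ #   conf.set10('HAVE_FOO', true)         # stored as 1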
+
+ def has_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return args[0] in self.conf_data.values
+
+ @FeatureNew('configuration_data.get()', '0.38.0')
+ @noArgsFlattening
+ def get_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[str, int, bool]:
+ if len(args) < 1 or len(args) > 2:
+ raise InterpreterException('Get method takes one or two arguments.')
+ if not isinstance(args[0], str):
+ raise InterpreterException('The variable name must be a string.')
+ name = args[0]
+ if name in self.conf_data:
+ return self.conf_data.get(name)[0]
+ if len(args) > 1:
+ # Assertion does not work because setting other values is still
+ # supported, but deprecated. Use T.cast in the meantime (even though
+ # this is a lie).
+ # TODO: Fix this once the deprecation is removed
+ # assert isinstance(args[1], (int, str, bool))
+ return T.cast(T.Union[str, int, bool], args[1])
+ raise InterpreterException('Entry %s not in configuration data.' % name)
+
+ @FeatureNew('configuration_data.get_unquoted()', '0.44.0')
+ def get_unquoted_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[str, int, bool]:
+ if len(args) < 1 or len(args) > 2:
+ raise InterpreterException('Get method takes one or two arguments.')
+ if not isinstance(args[0], str):
+ raise InterpreterException('The variable name must be a string.')
+ name = args[0]
+ if name in self.conf_data:
+ val = self.conf_data.get(name)[0]
+ elif len(args) > 1:
+ assert isinstance(args[1], (str, int, bool))
+ val = args[1]
+ else:
+ raise InterpreterException('Entry %s not in configuration data.' % name)
+ if isinstance(val, str) and val.startswith('"') and val.endswith('"'):
+ return val[1:-1]
+ return val
+
+ def get(self, name: str) -> T.Tuple[T.Union[str, int, bool], T.Optional[str]]:
+ return self.conf_data.values[name]
+
+ @FeatureNew('configuration_data.keys()', '0.57.0')
+ @noPosargs
+ def keys_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[str]:
+ return sorted(self.keys())
+
+ def keys(self) -> T.List[str]:
+ return list(self.conf_data.values.keys())
+
+ def merge_from_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> None:
+ if len(args) != 1:
+ raise InterpreterException('Merge_from takes one positional argument.')
+ from_object_holder = args[0]
+ if not isinstance(from_object_holder, ConfigurationDataObject):
+ raise InterpreterException('Merge_from argument must be a configuration data object.')
+ from_object = from_object_holder.conf_data
+ for k, v in from_object.values.items():
+ self.conf_data.values[k] = v
+
+
+_PARTIAL_DEP_KWARGS = [
+ KwargInfo('compile_args', bool, default=False),
+ KwargInfo('link_args', bool, default=False),
+ KwargInfo('links', bool, default=False),
+ KwargInfo('includes', bool, default=False),
+ KwargInfo('sources', bool, default=False),
+]
+
+class DependencyHolder(ObjectHolder[Dependency]):
+ def __init__(self, dep: Dependency, interpreter: 'Interpreter'):
+ super().__init__(dep, interpreter)
+ self.methods.update({'found': self.found_method,
+ 'type_name': self.type_name_method,
+ 'version': self.version_method,
+ 'name': self.name_method,
+ 'get_pkgconfig_variable': self.pkgconfig_method,
+ 'get_configtool_variable': self.configtool_method,
+ 'get_variable': self.variable_method,
+ 'partial_dependency': self.partial_dependency_method,
+ 'include_type': self.include_type_method,
+ 'as_system': self.as_system_method,
+ 'as_link_whole': self.as_link_whole_method,
+ })
+
+ def found(self) -> bool:
+ return self.found_method([], {})
+
+ @noPosargs
+ @noKwargs
+ def type_name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.type_name
+
+ @noPosargs
+ @noKwargs
+ def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ if self.held_object.type_name == 'internal':
+ return True
+ return self.held_object.found()
+
+ @noPosargs
+ @noKwargs
+ def version_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.get_version()
+
+ @noPosargs
+ @noKwargs
+ def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.get_name()
+
+ @FeatureDeprecated('Dependency.get_pkgconfig_variable', '0.56.0',
+ 'use Dependency.get_variable(pkgconfig : ...) instead')
+ @permittedKwargs({'define_variable', 'default'})
+ def pkgconfig_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ args = listify(args)
+ if len(args) != 1:
+ raise InterpreterException('get_pkgconfig_variable takes exactly one argument.')
+ varname = args[0]
+ if not isinstance(varname, str):
+ raise InterpreterException('Variable name must be a string.')
+ return self.held_object.get_pkgconfig_variable(varname, kwargs)
+
+ @FeatureNew('dep.get_configtool_variable', '0.44.0')
+ @FeatureDeprecated('Dependency.get_configtool_variable', '0.56.0',
+ 'use Dependency.get_variable(configtool : ...) instead')
+ @noKwargs
+ def configtool_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ args = listify(args)
+ if len(args) != 1:
+ raise InterpreterException('get_configtool_variable takes exactly one argument.')
+ varname = args[0]
+ if not isinstance(varname, str):
+ raise InterpreterException('Variable name must be a string.')
+ return self.held_object.get_configtool_variable(varname)
+
+ @FeatureNew('dep.partial_dependency', '0.46.0')
+ @noPosargs
+ @typed_kwargs('dep.partial_dependency', *_PARTIAL_DEP_KWARGS)
+ def partial_dependency_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.DependencyMethodPartialDependency') -> Dependency:
+ pdep = self.held_object.get_partial_dependency(**kwargs)
+ return pdep
+
+ @FeatureNew('dep.get_variable', '0.51.0')
+ @typed_pos_args('dep.get_variable', optargs=[str])
+ @permittedKwargs({'cmake', 'pkgconfig', 'configtool', 'internal', 'default_value', 'pkgconfig_define'})
+ @FeatureNewKwargs('dep.get_variable', '0.54.0', ['internal'])
+ def variable_method(self, args: T.Tuple[T.Optional[str]], kwargs: T.Dict[str, T.Any]) -> T.Union[str, T.List[str]]:
+ default_varname = args[0]
+ if default_varname is not None:
+ FeatureNew('0.58.0', 'Positional argument to dep.get_variable()').use(self.subproject)
+ for k in ['cmake', 'pkgconfig', 'configtool', 'internal']:
+ kwargs.setdefault(k, default_varname)
+ return self.held_object.get_variable(**kwargs)
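+
+ # Illustrative (editorial note): dep.get_variable('foo') behaves like
+ #   dep.get_variable(cmake: 'foo', pkgconfig: 'foo', configtool: 'foo', internal: 'foo')
+ # i.e. the positional argument (since 0.58.0) is the default for every lookup method.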
+
+ @FeatureNew('dep.include_type', '0.52.0')
+ @noPosargs
+ @noKwargs
+ def include_type_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.get_include_type()
+
+ @FeatureNew('dep.as_system', '0.52.0')
+ @noKwargs
+ def as_system_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> Dependency:
+ args = listify(args)
+ new_is_system = 'system'
+ if len(args) > 1:
+ raise InterpreterException('as_system takes only one optional value')
+ if len(args) == 1:
+ if not isinstance(args[0], str):
+ raise InterpreterException('as_system takes exactly one string parameter')
+ new_is_system = args[0]
+ new_dep = self.held_object.generate_system_dependency(new_is_system)
+ return new_dep
+
+ @FeatureNew('dep.as_link_whole', '0.56.0')
+ @noKwargs
+ @noPosargs
+ def as_link_whole_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> Dependency:
+ if not isinstance(self.held_object, InternalDependency):
+ raise InterpreterException('as_link_whole method is only supported on declare_dependency() objects')
+ new_dep = self.held_object.generate_link_whole_dependency()
+ return new_dep
+
+class ExternalProgramHolder(ObjectHolder[ExternalProgram]):
+ def __init__(self, ep: ExternalProgram, interpreter: 'Interpreter') -> None:
+ super().__init__(ep, interpreter)
+ self.methods.update({'found': self.found_method,
+ 'path': self.path_method,
+ 'full_path': self.full_path_method})
+
+ @noPosargs
+ @noKwargs
+ def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.found()
+
+ @noPosargs
+ @noKwargs
+ @FeatureDeprecated('ExternalProgram.path', '0.55.0',
+ 'use ExternalProgram.full_path() instead')
+ def path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self._full_path()
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('ExternalProgram.full_path', '0.55.0')
+ def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self._full_path()
+
+ def _full_path(self) -> str:
+ if not self.found():
+ raise InterpreterException('Unable to get the path of a not-found external program')
+ path = self.held_object.get_path()
+ assert path is not None
+ return path
+
+ def found(self) -> bool:
+ return self.held_object.found()
+
+class ExternalLibraryHolder(ObjectHolder[ExternalLibrary]):
+ def __init__(self, el: ExternalLibrary, interpreter: 'Interpreter'):
+ super().__init__(el, interpreter)
+ self.methods.update({'found': self.found_method,
+ 'type_name': self.type_name_method,
+ 'partial_dependency': self.partial_dependency_method,
+ })
+
+ @noPosargs
+ @noKwargs
+ def type_name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.type_name
+
+ @noPosargs
+ @noKwargs
+ def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.held_object.found()
+
+ @FeatureNew('dep.partial_dependency', '0.46.0')
+ @noPosargs
+ @typed_kwargs('dep.partial_dependency', *_PARTIAL_DEP_KWARGS)
+ def partial_dependency_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.DependencyMethodPartialDependency') -> Dependency:
+ pdep = self.held_object.get_partial_dependency(**kwargs)
+ return pdep
+
+# A machine that's statically known from the cross file
+class MachineHolder(ObjectHolder['MachineInfo']):
+ def __init__(self, machine_info: 'MachineInfo', interpreter: 'Interpreter'):
+ super().__init__(machine_info, interpreter)
+ self.methods.update({'system': self.system_method,
+ 'cpu': self.cpu_method,
+ 'cpu_family': self.cpu_family_method,
+ 'endian': self.endian_method,
+ })
+
+ @noPosargs
+ @noKwargs
+ def cpu_family_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.cpu_family
+
+ @noPosargs
+ @noKwargs
+ def cpu_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.cpu
+
+ @noPosargs
+ @noKwargs
+ def system_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.system
+
+ @noPosargs
+ @noKwargs
+ def endian_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.held_object.endian
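+
+ # Illustrative meson.build usage (editorial sketch):
+ #   host_machine.system()      # e.g. 'linux'
+ #   host_machine.cpu_family()  # e.g. 'x86_64'
+ #   host_machine.endian()      # 'little' or 'big'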
+
+class IncludeDirsHolder(ObjectHolder[build.IncludeDirs]):
+ pass
+
+class FileHolder(ObjectHolder[mesonlib.File]):
+ pass
+
+class HeadersHolder(ObjectHolder[build.Headers]):
+ pass
+
+class DataHolder(ObjectHolder[build.Data]):
+ pass
+
+class InstallDirHolder(ObjectHolder[build.InstallDir]):
+ pass
+
+class ManHolder(ObjectHolder[build.Man]):
+ pass
+
+class GeneratedObjectsHolder(ObjectHolder[build.ExtractedObjects]):
+ pass
+
+class Test(MesonInterpreterObject):
+ def __init__(self, name: str, project: str, suite: T.List[str], exe: build.Executable,
+ depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]],
+ is_parallel: bool, cmd_args: T.List[str], env: build.EnvironmentVariables,
+ should_fail: bool, timeout: int, workdir: T.Optional[str], protocol: str,
+ priority: int):
+ super().__init__()
+ self.name = name
+ self.suite = listify(suite)
+ self.project_name = project
+ self.exe = exe
+ self.depends = depends
+ self.is_parallel = is_parallel
+ self.cmd_args = cmd_args
+ self.env = env
+ self.should_fail = should_fail
+ self.timeout = timeout
+ self.workdir = workdir
+ self.protocol = TestProtocol.from_str(protocol)
+ self.priority = priority
+
+ def get_exe(self) -> build.Executable:
+ return self.exe
+
+ def get_name(self) -> str:
+ return self.name
+
+class NullSubprojectInterpreter(HoldableObject):
+ pass
+
+# TODO: This should really be an `ObjectHolder`, but the additional stuff in this
+# class prevents this. Thus, this class should be split into a pure
+# `ObjectHolder` and a class specifically for storing state in the `Interpreter`.
+class SubprojectHolder(MesonInterpreterObject):
+
+ def __init__(self, subinterpreter: T.Union['Interpreter', NullSubprojectInterpreter],
+ subdir: str,
+ warnings: int = 0,
+ disabled_feature: T.Optional[str] = None,
+ exception: T.Optional[MesonException] = None) -> None:
+ super().__init__()
+ self.held_object = subinterpreter
+ self.warnings = warnings
+ self.disabled_feature = disabled_feature
+ self.exception = exception
+ self.subdir = PurePath(subdir).as_posix()
+ self.methods.update({'get_variable': self.get_variable_method,
+ 'found': self.found_method,
+ })
+
+ @noPosargs
+ @noKwargs
+ def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ return self.found()
+
+ def found(self) -> bool:
+ return not isinstance(self.held_object, NullSubprojectInterpreter)
+
+ @noKwargs
+ @noArgsFlattening
+ @permissive_unholder_return
+ def get_variable_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]:
+ if len(args) < 1 or len(args) > 2:
+ raise InterpreterException('Get_variable takes one or two arguments.')
+ if isinstance(self.held_object, NullSubprojectInterpreter): # == not self.found()
+ raise InterpreterException('Subproject "%s" disabled can\'t get_variable on it.' % (self.subdir))
+ varname = args[0]
+ if not isinstance(varname, str):
+ raise InterpreterException('Get_variable first argument must be a string.')
+ try:
+ return self.held_object.variables[varname]
+ except KeyError:
+ pass
+
+ if len(args) == 2:
+ return args[1]
+
+ raise InvalidArguments(f'Requested variable "{varname}" not found.')
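+
+ # Illustrative meson.build usage (editorial sketch):
+ #   sub = subproject('foo')
+ #   dep = sub.get_variable('foo_dep', fallback_value)  # second argument is an optional default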
+
+class ModuleObjectHolder(ObjectHolder[ModuleObject]):
+ def method_call(self, method_name: str, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> TYPE_var:
+ modobj = self.held_object
+ method = modobj.methods.get(method_name)
+ if not method:
+ raise InvalidCode(f'Unknown method {method_name!r} in object.')
+ if not getattr(method, 'no-args-flattening', False):
+ args = flatten(args)
+ if not getattr(method, 'no-second-level-holder-flattening', False):
+ args, kwargs = resolve_second_level_holders(args, kwargs)
+ state = ModuleState(self.interpreter)
+ # Many modules do for example self.interpreter.find_program_impl(),
+ # so we have to ensure they use the current interpreter and not the one
+ # that first imported that module, otherwise it will use outdated
+ # overrides.
+ if isinstance(modobj, ExtensionModule):
+ modobj.interpreter = self.interpreter
+ ret = method(state, args, kwargs)
+ if isinstance(ret, ModuleReturnValue):
+ self.interpreter.process_new_values(ret.new_objects)
+ ret = ret.return_value
+ return ret
+
+class MutableModuleObjectHolder(ModuleObjectHolder, MutableInterpreterObject):
+ def __deepcopy__(self, memo: T.Dict[int, T.Any]) -> 'MutableModuleObjectHolder':
+ # Deepcopy only held object, not interpreter
+ modobj = copy.deepcopy(self.held_object, memo)
+ return MutableModuleObjectHolder(modobj, self.interpreter)
+
+
+_BuildTarget = T.TypeVar('_BuildTarget', bound=T.Union[build.BuildTarget, build.BothLibraries])
+
+class BuildTargetHolder(ObjectHolder[_BuildTarget]):
+ def __init__(self, target: _BuildTarget, interp: 'Interpreter'):
+ super().__init__(target, interp)
+ self.methods.update({'extract_objects': self.extract_objects_method,
+ 'extract_all_objects': self.extract_all_objects_method,
+ 'name': self.name_method,
+ 'get_id': self.get_id_method,
+ 'outdir': self.outdir_method,
+ 'full_path': self.full_path_method,
+ 'path': self.path_method,
+ 'found': self.found_method,
+ 'private_dir_include': self.private_dir_include_method,
+ })
+
+ def __repr__(self) -> str:
+ r = '<{} {}: {}>'
+ h = self.held_object
+ return r.format(self.__class__.__name__, h.get_id(), h.filename)
+
+ @property
+ def _target_object(self) -> build.BuildTarget:
+ if isinstance(self.held_object, build.BothLibraries):
+ return self.held_object.get_default_object()
+ assert isinstance(self.held_object, build.BuildTarget)
+ return self.held_object
+
+ def is_cross(self) -> bool:
+ return not self._target_object.environment.machines.matches_build_machine(self._target_object.for_machine)
+
+ @noPosargs
+ @noKwargs
+ def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+ if not (isinstance(self.held_object, build.Executable) and self.held_object.was_returned_by_find_program):
+ FeatureNew.single_use('BuildTarget.found', '0.59.0', subproject=self.held_object.subproject)
+ return True
+
+ @noPosargs
+ @noKwargs
+ def private_dir_include_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.IncludeDirs:
+ return build.IncludeDirs('', [], False, [self.interpreter.backend.get_target_private_dir(self._target_object)])
+
+ @noPosargs
+ @noKwargs
+ def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.interpreter.backend.get_target_filename_abs(self._target_object)
+
+ @noPosargs
+ @noKwargs
+ @FeatureDeprecated('BuildTarget.path', '0.55.0', 'Use BuildTarget.full_path instead')
+ def path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.interpreter.backend.get_target_filename_abs(self._target_object)
+
+ @noPosargs
+ @noKwargs
+ def outdir_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.interpreter.backend.get_target_dir(self._target_object)
+
+ @noKwargs
+ @typed_pos_args('extract_objects', varargs=(mesonlib.File, str))
+ def extract_objects_method(self, args: T.Tuple[T.List[mesonlib.FileOrString]], kwargs: TYPE_nkwargs) -> build.ExtractedObjects:
+ return self._target_object.extract_objects(args[0])
+
+ @noPosargs
+ @typed_kwargs(
+ 'extract_all_objects',
+ KwargInfo(
+ 'recursive', bool, default=False, since='0.46.0',
+ not_set_warning=textwrap.dedent('''\
+ extract_all_objects called without setting recursive
+ keyword argument. Meson currently defaults to
+ non-recursive to maintain backward compatibility but
+ the default will be changed in the future.
+ ''')
+ )
+ )
+ def extract_all_objects_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.BuildTargeMethodExtractAllObjects') -> build.ExtractedObjects:
+ return self._target_object.extract_all_objects(kwargs['recursive'])
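+
+ # Illustrative meson.build usage (editorial sketch):
+ #   objs = mylib.extract_objects('a.c', 'b.c')
+ #   all_objs = mylib.extract_all_objects(recursive: true)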
+
+ @noPosargs
+ @noKwargs
+ def get_id_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self._target_object.get_id()
+
+ @FeatureNew('name', '0.54.0')
+ @noPosargs
+ @noKwargs
+ def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self._target_object.name
+
+class ExecutableHolder(BuildTargetHolder[build.Executable]):
+ pass
+
+class StaticLibraryHolder(BuildTargetHolder[build.StaticLibrary]):
+ pass
+
+class SharedLibraryHolder(BuildTargetHolder[build.SharedLibrary]):
+ pass
+
+class BothLibrariesHolder(BuildTargetHolder[build.BothLibraries]):
+ def __init__(self, libs: build.BothLibraries, interp: 'Interpreter'):
+ # FIXME: This build target always represents the shared library, but
+ # that should be configurable.
+ super().__init__(libs, interp)
+ self.methods.update({'get_shared_lib': self.get_shared_lib_method,
+ 'get_static_lib': self.get_static_lib_method,
+ })
+
+ def __repr__(self) -> str:
+ r = '<{} {}: {}, {}: {}>'
+ h1 = self.held_object.shared
+ h2 = self.held_object.static
+ return r.format(self.__class__.__name__, h1.get_id(), h1.filename, h2.get_id(), h2.filename)
+
+ @noPosargs
+ @noKwargs
+ def get_shared_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.SharedLibrary:
+ return self.held_object.shared
+
+ @noPosargs
+ @noKwargs
+ def get_static_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.StaticLibrary:
+ return self.held_object.static
+
+class SharedModuleHolder(BuildTargetHolder[build.SharedModule]):
+ pass
+
+class JarHolder(BuildTargetHolder[build.Jar]):
+ pass
+
+class CustomTargetIndexHolder(ObjectHolder[build.CustomTargetIndex]):
+ def __init__(self, target: build.CustomTargetIndex, interp: 'Interpreter'):
+ super().__init__(target, interp)
+ self.methods.update({'full_path': self.full_path_method,
+ })
+
+ @FeatureNew('custom_target[i].full_path', '0.54.0')
+ @noPosargs
+ @noKwargs
+ def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ assert self.interpreter.backend is not None
+ return self.interpreter.backend.get_target_filename_abs(self.held_object)
+
+class CustomTargetHolder(ObjectHolder[build.CustomTarget]):
+ def __init__(self, target: 'build.CustomTarget', interp: 'Interpreter'):
+ super().__init__(target, interp)
+ self.methods.update({'full_path': self.full_path_method,
+ 'to_list': self.to_list_method,
+ })
+
+ def __repr__(self) -> str:
+ r = '<{} {}: {}>'
+ h = self.held_object
+ return r.format(self.__class__.__name__, h.get_id(), h.command)
+
+ @noPosargs
+ @noKwargs
+ def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+ return self.interpreter.backend.get_target_filename_abs(self.held_object)
+
+ @FeatureNew('custom_target.to_list', '0.54.0')
+ @noPosargs
+ @noKwargs
+ def to_list_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[build.CustomTargetIndex]:
+ return list(self.held_object)
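+
+ # Illustrative meson.build usage (editorial sketch):
+ #   ct = custom_target(..., output: ['a.h', 'a.c'])
+ #   foreach out : ct.to_list()
+ #     message(out.full_path())
+ #   endforeach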
+
+ def __getitem__(self, index: int) -> build.CustomTargetIndex:
+ return self.held_object[index]
+
+ def __setitem__(self, index: int, value: T.Any) -> None: # lgtm[py/unexpected-raise-in-special-method]
+ raise InterpreterException('Cannot set a member of a CustomTarget')
+
+ def __delitem__(self, index: int) -> None: # lgtm[py/unexpected-raise-in-special-method]
+ raise InterpreterException('Cannot delete a member of a CustomTarget')
+
+class RunTargetHolder(ObjectHolder[build.RunTarget]):
+ pass
+
+class AliasTargetHolder(ObjectHolder[build.AliasTarget]):
+ pass
+
+class GeneratedListHolder(ObjectHolder[build.GeneratedList]):
+ pass
+
+class GeneratorHolder(ObjectHolder[build.Generator]):
+ def __init__(self, gen: build.Generator, interpreter: 'Interpreter'):
+ super().__init__(gen, interpreter)
+ self.methods.update({'process': self.process_method})
+
+ @typed_pos_args('generator.process', min_varargs=1, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList))
+ @typed_kwargs(
+ 'generator.process',
+ KwargInfo('preserve_path_from', str, since='0.45.0'),
+ KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+ )
+ def process_method(self,
+ args: T.Tuple[T.List[T.Union[str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]],
+ kwargs: 'kwargs.GeneratorProcess') -> build.GeneratedList:
+ preserve_path_from = kwargs['preserve_path_from']
+ if preserve_path_from is not None:
+ preserve_path_from = os.path.normpath(preserve_path_from)
+ if not os.path.isabs(preserve_path_from):
+ # This is a bit of a hack. Fix properly before merging.
+ raise InvalidArguments('Preserve_path_from must be an absolute path for now. Sorry.')
+
+ if any(isinstance(a, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for a in args[0]):
+ FeatureNew.single_use(
+ 'Calling generator.process with CustomTarget or Index of CustomTarget.',
+ '0.57.0', self.interpreter.subproject)
+
+ gl = self.held_object.process_files(args[0], self.interpreter,
+ preserve_path_from, extra_args=kwargs['extra_args'])
+
+ return gl
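+
+ # Illustrative meson.build usage (editorial sketch; note preserve_path_from must
+ # currently be an absolute path, as enforced above):
+ #   gen = generator(prog, output: '@BASENAME@.c', arguments: ['@INPUT@', '@OUTPUT@'])
+ #   srcs = gen.process('a.in', 'b.in', preserve_path_from: meson.current_source_dir())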
diff --git a/meson/mesonbuild/interpreter/kwargs.py b/meson/mesonbuild/interpreter/kwargs.py
new file mode 100644
index 000000000..b92b66fd7
--- /dev/null
+++ b/meson/mesonbuild/interpreter/kwargs.py
@@ -0,0 +1,139 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2021 The Meson Developers
+# Copyright © 2021 Intel Corporation
+
+"""Keyword Argument type annotations."""
+
+import typing as T
+
+from typing_extensions import TypedDict, Literal
+
+from .. import build
+from .. import coredata
+from ..mesonlib import MachineChoice, File, FileMode, FileOrString
+from .interpreterobjects import EnvironmentVariablesObject
+
+
+class FuncAddProjectArgs(TypedDict):
+
+ """Keyword Arguments for the add_*_arguments family of arguments.
+
+ including `add_global_arguments`, `add_project_arguments`, and their
+ link variants
+
+ Because of the use of a converter function, we get the native keyword as
+ a MachineChoice instance already.
+ """
+
+ native: MachineChoice
+ language: T.List[str]
+
+
+class BaseTest(TypedDict):
+
+ """Shared base for the Rust module."""
+
+ args: T.List[T.Union[str, File, build.Target]]
+ should_fail: bool
+ timeout: int
+ workdir: T.Optional[str]
+ depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]]
+ priority: int
+ env: T.Union[EnvironmentVariablesObject, T.List[str], T.Dict[str, str], str]
+ suite: T.List[str]
+
+
+class FuncBenchmark(BaseTest):
+
+ """Keyword Arguments shared between `test` and `benchmark`."""
+
+ protocol: Literal['exitcode', 'tap', 'gtest', 'rust']
+
+
+class FuncTest(FuncBenchmark):
+
+ """Keyword Arguments for `test`
+
+ `test` only adds the `is_parallel` argument over benchmark, so inheritance
+ is helpful here.
+ """
+
+ is_parallel: bool
+
+
+class ExtractRequired(TypedDict):
+
+ """Keyword Arguments consumed by the `extract_required_kwargs` function.
+
+ Any function that uses the `required` keyword argument which accepts either
+ a boolean or a feature option should inherit its arguments from this class.
+ """
+
+ required: T.Union[bool, coredata.UserFeatureOption]
+
+
+class FuncGenerator(TypedDict):
+
+ """Keyword rguments for the generator function."""
+
+ arguments: T.List[str]
+ output: T.List[str]
+ depfile: bool
+ capture: bool
+ depends: T.List[T.Union[build.BuildTarget, build.CustomTarget]]
+
+
+class GeneratorProcess(TypedDict):
+
+ """Keyword Arguments for generator.process."""
+
+ preserve_path_from: T.Optional[str]
+ extra_args: T.List[str]
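+
+# Illustrative (editorial note): this TypedDict pairs with the decorator used in
+# interpreterobjects.py:
+#   @typed_kwargs('generator.process',
+#                 KwargInfo('preserve_path_from', str, since='0.45.0'),
+#                 KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]))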
+
+class DependencyMethodPartialDependency(TypedDict):
+
+ """ Keyword Arguments for the dep.partial_dependency methods """
+
+ compile_args: bool
+ link_args: bool
+ links: bool
+ includes: bool
+ sources: bool
+
+class BuildTargeMethodExtractAllObjects(TypedDict):
+ recursive: bool
+
+class FuncInstallSubdir(TypedDict):
+
+ install_dir: str
+ strip_directory: bool
+ exclude_files: T.List[str]
+ exclude_directories: T.List[str]
+ install_mode: FileMode
+
+
+class FuncInstallData(TypedDict):
+
+ install_dir: str
+ sources: T.List[FileOrString]
+ rename: T.List[str]
+ install_mode: FileMode
+
+
+class FuncInstallHeaders(TypedDict):
+
+ install_dir: T.Optional[str]
+ install_mode: FileMode
+ subdir: T.Optional[str]
+
+
+class FuncInstallMan(TypedDict):
+
+ install_dir: T.Optional[str]
+ install_mode: FileMode
+ locale: T.Optional[str]
+
+
+class FuncImportModule(ExtractRequired):
+
+ disabler: bool
diff --git a/meson/mesonbuild/interpreter/mesonmain.py b/meson/mesonbuild/interpreter/mesonmain.py
new file mode 100644
index 000000000..97a695b9d
--- /dev/null
+++ b/meson/mesonbuild/interpreter/mesonmain.py
@@ -0,0 +1,382 @@
+import os
+
+from .. import mesonlib
+from .. import dependencies
+from .. import build
+from .. import mlog
+
+from ..mesonlib import MachineChoice, OptionKey
+from ..programs import OverrideProgram, ExternalProgram
+from ..interpreterbase import (MesonInterpreterObject, FeatureNewKwargs, FeatureNew, FeatureDeprecated,
+ typed_pos_args, permittedKwargs, noArgsFlattening, noPosargs, noKwargs,
+ MesonVersionString, InterpreterException)
+
+from .interpreterobjects import (ExecutableHolder, ExternalProgramHolder,
+ CustomTargetHolder, CustomTargetIndexHolder,
+ EnvironmentVariablesObject)
+
+import typing as T
+
+if T.TYPE_CHECKING:
+ from .interpreter import Interpreter
+
+class MesonMain(MesonInterpreterObject):
+ def __init__(self, build: 'build.Build', interpreter: 'Interpreter'):
+ super().__init__(subproject=interpreter.subproject)
+ self.build = build
+ self.interpreter = interpreter
+ self.methods.update({'get_compiler': self.get_compiler_method,
+ 'is_cross_build': self.is_cross_build_method,
+ 'has_exe_wrapper': self.has_exe_wrapper_method,
+ 'can_run_host_binaries': self.can_run_host_binaries_method,
+ 'is_unity': self.is_unity_method,
+ 'is_subproject': self.is_subproject_method,
+ 'current_source_dir': self.current_source_dir_method,
+ 'current_build_dir': self.current_build_dir_method,
+ 'source_root': self.source_root_method,
+ 'build_root': self.build_root_method,
+ 'project_source_root': self.project_source_root_method,
+ 'project_build_root': self.project_build_root_method,
+ 'global_source_root': self.global_source_root_method,
+ 'global_build_root': self.global_build_root_method,
+ 'add_install_script': self.add_install_script_method,
+ 'add_postconf_script': self.add_postconf_script_method,
+ 'add_dist_script': self.add_dist_script_method,
+ 'install_dependency_manifest': self.install_dependency_manifest_method,
+ 'override_dependency': self.override_dependency_method,
+ 'override_find_program': self.override_find_program_method,
+ 'project_version': self.project_version_method,
+ 'project_license': self.project_license_method,
+ 'version': self.version_method,
+ 'project_name': self.project_name_method,
+ 'get_cross_property': self.get_cross_property_method,
+ 'get_external_property': self.get_external_property_method,
+ 'has_external_property': self.has_external_property_method,
+ 'backend': self.backend_method,
+ 'add_devenv': self.add_devenv_method,
+ })
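+
+ # Illustrative meson.build usage of the `meson` builtin object (editorial sketch):
+ #   meson.version()
+ #   meson.current_source_dir()
+ #   meson.override_find_program('sed', find_program('gsed'))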
+
+ def _find_source_script(self, prog: T.Union[str, mesonlib.File, build.Executable, ExternalProgram], args: T.List[str]):
+ if isinstance(prog, (build.Executable, ExternalProgram)):
+ return self.interpreter.backend.get_executable_serialisation([prog] + args)
+ found = self.interpreter.func_find_program({}, prog, {})
+ es = self.interpreter.backend.get_executable_serialisation([found] + args)
+ es.subproject = self.interpreter.subproject
+ return es
+
+ def _process_script_args(
+ self, name: str, args: T.List[T.Union[
+ str, mesonlib.File, CustomTargetHolder,
+ CustomTargetIndexHolder,
+ ExternalProgramHolder, ExecutableHolder,
+ ]], allow_built: bool = False) -> T.List[str]:
+ script_args: T.List[str] = []
+ new = False
+ for a in args:
+ if isinstance(a, str):
+ script_args.append(a)
+ elif isinstance(a, mesonlib.File):
+ new = True
+ script_args.append(a.rel_to_builddir(self.interpreter.environment.source_dir))
+ elif isinstance(a, (build.BuildTarget, build.CustomTarget, build.CustomTargetIndex)):
+ if not allow_built:
+ raise InterpreterException(f'Arguments to {name} cannot be built')
+ new = True
+ script_args.extend([os.path.join(a.get_subdir(), o) for o in a.get_outputs()])
+
+ # This feels really hacky, but I'm not sure how else to fix
+ # this without completely rewriting install script handling.
+ # This is complicated by the fact that the install target
+ # depends on all.
+ if isinstance(a, build.CustomTargetIndex):
+ a.target.build_by_default = True
+ else:
+ a.build_by_default = True
+ elif isinstance(a, ExternalProgram):
+ script_args.extend(a.command)
+ new = True
+ else:
+ raise InterpreterException(
+ f'Arguments to {name} must be strings, Files, CustomTargets, '
+ 'or Indexes of CustomTargets')
+ if new:
+ FeatureNew.single_use(
+ f'Calling "{name}" with File, CustomTaget, Index of CustomTarget, '
+ 'Executable, or ExternalProgram',
+ '0.55.0', self.interpreter.subproject)
+ return script_args
+
+ @FeatureNewKwargs('add_install_script', '0.57.0', ['skip_if_destdir'])
+ @permittedKwargs({'skip_if_destdir'})
+ def add_install_script_method(self, args: 'T.Tuple[T.Union[str, mesonlib.File, ExecutableHolder], T.Union[str, mesonlib.File, CustomTargetHolder, CustomTargetIndexHolder], ...]', kwargs):
+ if len(args) < 1:
+ raise InterpreterException('add_install_script takes one or more arguments')
+ if isinstance(args[0], mesonlib.File):
+ FeatureNew.single_use('Passing file object to script parameter of add_install_script',
+ '0.57.0', self.interpreter.subproject)
+ skip_if_destdir = kwargs.get('skip_if_destdir', False)
+ if not isinstance(skip_if_destdir, bool):
+ raise InterpreterException('skip_if_destdir keyword argument must be boolean')
+ script_args = self._process_script_args('add_install_script', args[1:], allow_built=True)
+ script = self._find_source_script(args[0], script_args)
+ script.skip_if_destdir = skip_if_destdir
+ self.build.install_scripts.append(script)
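+
+ # Illustrative meson.build usage (editorial sketch):
+ #   meson.add_install_script('postinstall.py', my_target, skip_if_destdir: true)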
+
+ @permittedKwargs(set())
+ def add_postconf_script_method(self, args, kwargs):
+ if len(args) < 1:
+ raise InterpreterException('add_postconf_script takes one or more arguments')
+ if isinstance(args[0], mesonlib.File):
+ FeatureNew.single_use('Passing file object to script parameter of add_postconf_script',
+ '0.57.0', self.interpreter.subproject)
+ script_args = self._process_script_args('add_postconf_script', args[1:], allow_built=True)
+ script = self._find_source_script(args[0], script_args)
+ self.build.postconf_scripts.append(script)
+
+ @permittedKwargs(set())
+ def add_dist_script_method(self, args, kwargs):
+ if len(args) < 1:
+ raise InterpreterException('add_dist_script takes one or more arguments')
+ if len(args) > 1:
+ FeatureNew.single_use('Calling "add_dist_script" with multiple arguments',
+ '0.49.0', self.interpreter.subproject)
+ if isinstance(args[0], mesonlib.File):
+ FeatureNew.single_use('Passing file object to script parameter of add_dist_script',
+ '0.57.0', self.interpreter.subproject)
+ if self.interpreter.subproject != '':
+ FeatureNew.single_use('Calling "add_dist_script" in a subproject',
+ '0.58.0', self.interpreter.subproject)
+ script_args = self._process_script_args('add_dist_script', args[1:], allow_built=True)
+ script = self._find_source_script(args[0], script_args)
+ self.build.dist_scripts.append(script)
+
+ @noPosargs
+ @permittedKwargs({})
+ def current_source_dir_method(self, args, kwargs):
+ src = self.interpreter.environment.source_dir
+ sub = self.interpreter.subdir
+ if sub == '':
+ return src
+ return os.path.join(src, sub)
+
+ @noPosargs
+ @permittedKwargs({})
+ def current_build_dir_method(self, args, kwargs):
+ src = self.interpreter.environment.build_dir
+ sub = self.interpreter.subdir
+ if sub == '':
+ return src
+ return os.path.join(src, sub)
+
+ @noPosargs
+ @permittedKwargs({})
+ def backend_method(self, args, kwargs):
+ return self.interpreter.backend.name
+
+ @noPosargs
+ @permittedKwargs({})
+ @FeatureDeprecated('meson.source_root', '0.56.0', 'use meson.project_source_root() or meson.global_source_root() instead.')
+ def source_root_method(self, args, kwargs):
+ return self.interpreter.environment.source_dir
+
+ @noPosargs
+ @permittedKwargs({})
+ @FeatureDeprecated('meson.build_root', '0.56.0', 'use meson.project_build_root() or meson.global_build_root() instead.')
+ def build_root_method(self, args, kwargs):
+ return self.interpreter.environment.build_dir
+
+ @noPosargs
+ @permittedKwargs({})
+ @FeatureNew('meson.project_source_root', '0.56.0')
+ def project_source_root_method(self, args, kwargs):
+ src = self.interpreter.environment.source_dir
+ sub = self.interpreter.root_subdir
+ if sub == '':
+ return src
+ return os.path.join(src, sub)
+
+ @noPosargs
+ @permittedKwargs({})
+ @FeatureNew('meson.project_build_root', '0.56.0')
+ def project_build_root_method(self, args, kwargs):
+ src = self.interpreter.environment.build_dir
+ sub = self.interpreter.root_subdir
+ if sub == '':
+ return src
+ return os.path.join(src, sub)
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('meson.global_source_root', '0.58.0')
+ def global_source_root_method(self, args, kwargs):
+ return self.interpreter.environment.source_dir
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('meson.global_build_root', '0.58.0')
+ def global_build_root_method(self, args, kwargs):
+ return self.interpreter.environment.build_dir
+
+ @noPosargs
+ @permittedKwargs({})
+ @FeatureDeprecated('meson.has_exe_wrapper', '0.55.0', 'use meson.can_run_host_binaries instead.')
+ def has_exe_wrapper_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool:
+ return self.can_run_host_binaries_impl(args, kwargs)
+
+ @noPosargs
+ @permittedKwargs({})
+ @FeatureNew('meson.can_run_host_binaries', '0.55.0')
+ def can_run_host_binaries_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool:
+ return self.can_run_host_binaries_impl(args, kwargs)
+
+ def can_run_host_binaries_impl(self, args, kwargs):
+ if (self.is_cross_build_method(None, None) and
+ self.build.environment.need_exe_wrapper()):
+ if self.build.environment.exe_wrapper is None:
+ return False
+ # We return True when exe_wrap is defined, when it's not needed, and
+ # when we're compiling natively. The last two are semantically confusing.
+ # Need to revisit this.
+ return True
+
+ @noPosargs
+ @permittedKwargs({})
+ def is_cross_build_method(self, args, kwargs):
+ return self.build.environment.is_cross_build()
+
+ @permittedKwargs({'native'})
+ def get_compiler_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('get_compiler takes exactly one positional argument.')
+ cname = args[0]
+ for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+ clist = self.interpreter.coredata.compilers[for_machine]
+ if cname in clist:
+ return clist[cname]
+ raise InterpreterException(f'Tried to access compiler for language "{cname}", not specified for {for_machine.get_lower_case_name()} machine.')
+
+ @noPosargs
+ @permittedKwargs({})
+ def is_unity_method(self, args, kwargs):
+ optval = self.interpreter.environment.coredata.get_option(OptionKey('unity'))
+ if optval == 'on' or (optval == 'subprojects' and self.interpreter.is_subproject()):
+ return True
+ return False
+
+ @noPosargs
+ @permittedKwargs({})
+ def is_subproject_method(self, args, kwargs):
+ return self.interpreter.is_subproject()
+
+ @permittedKwargs({})
+ def install_dependency_manifest_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('Must specify manifest install file name')
+ if not isinstance(args[0], str):
+ raise InterpreterException('Argument must be a string.')
+ self.build.dep_manifest_name = args[0]
+
+ @FeatureNew('meson.override_find_program', '0.46.0')
+ @permittedKwargs({})
+ def override_find_program_method(self, args, kwargs):
+ if len(args) != 2:
+ raise InterpreterException('Override needs two arguments')
+ name, exe = args
+ if not isinstance(name, str):
+ raise InterpreterException('First argument must be a string')
+ if isinstance(exe, mesonlib.File):
+ abspath = exe.absolute_path(self.interpreter.environment.source_dir,
+ self.interpreter.environment.build_dir)
+ if not os.path.exists(abspath):
+ raise InterpreterException('Tried to override %s with a file that does not exist.' % name)
+ exe = OverrideProgram(name, abspath)
+ if not isinstance(exe, (ExternalProgram, build.Executable)):
+ raise InterpreterException('Second argument must be an external program or executable.')
+ self.interpreter.add_find_program_override(name, exe)
+
+ @FeatureNew('meson.override_dependency', '0.54.0')
+ @permittedKwargs({'native'})
+ def override_dependency_method(self, args, kwargs):
+ if len(args) != 2:
+ raise InterpreterException('Override needs two arguments')
+ name = args[0]
+ dep = args[1]
+ if not isinstance(name, str) or not name:
+ raise InterpreterException('First argument must be a string and cannot be empty')
+ if not isinstance(dep, dependencies.Dependency):
+ raise InterpreterException('Second argument must be a dependency object')
+ identifier = dependencies.get_dep_identifier(name, kwargs)
+ for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+ override = self.build.dependency_overrides[for_machine].get(identifier)
+ if override:
+ m = 'Tried to override dependency {!r} which has already been resolved or overridden at {}'
+ location = mlog.get_error_location_string(override.node.filename, override.node.lineno)
+ raise InterpreterException(m.format(name, location))
+ self.build.dependency_overrides[for_machine][identifier] = \
+ build.DependencyOverride(dep, self.interpreter.current_node)
+
+ @noPosargs
+ @permittedKwargs({})
+ def project_version_method(self, args, kwargs):
+ return self.build.dep_manifest[self.interpreter.active_projectname]['version']
+
+ @FeatureNew('meson.project_license()', '0.45.0')
+ @noPosargs
+ @permittedKwargs({})
+ def project_license_method(self, args, kwargs):
+ return self.build.dep_manifest[self.interpreter.active_projectname]['license']
+
+ @noPosargs
+ @permittedKwargs({})
+ def version_method(self, args, kwargs):
+ return MesonVersionString(self.interpreter.coredata.version)
+
+ @noPosargs
+ @permittedKwargs({})
+ def project_name_method(self, args, kwargs):
+ return self.interpreter.active_projectname
+
+ def __get_external_property_impl(self, propname: str, fallback: T.Optional[object], machine: MachineChoice) -> object:
+ """Shared implementation for get_cross_property and get_external_property."""
+ try:
+ return self.interpreter.environment.properties[machine][propname]
+ except KeyError:
+ if fallback is not None:
+ return fallback
+ raise InterpreterException(f'Unknown property for {machine.get_lower_case_name()} machine: {propname}')
+
+ @noArgsFlattening
+ @permittedKwargs({})
+ @FeatureDeprecated('meson.get_cross_property', '0.58.0', 'Use meson.get_external_property() instead')
+ @typed_pos_args('meson.get_cross_property', str, optargs=[object])
+ def get_cross_property_method(self, args: T.Tuple[str, T.Optional[object]], kwargs: T.Dict[str, T.Any]) -> object:
+ propname, fallback = args
+ return self.__get_external_property_impl(propname, fallback, MachineChoice.HOST)
+
+ @noArgsFlattening
+ @permittedKwargs({'native'})
+ @FeatureNew('meson.get_external_property', '0.54.0')
+ @typed_pos_args('meson.get_external_property', str, optargs=[object])
+ def get_external_property_method(self, args: T.Tuple[str, T.Optional[object]], kwargs: T.Dict[str, T.Any]) -> object:
+ propname, fallback = args
+ machine = self.interpreter.machine_from_native_kwarg(kwargs)
+ return self.__get_external_property_impl(propname, fallback, machine)
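+
+ # Illustrative meson.build usage (editorial sketch):
+ #   meson.get_external_property('some_prop', 'fallback', native: true)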
+
+ @permittedKwargs({'native'})
+ @FeatureNew('meson.has_external_property', '0.58.0')
+ @typed_pos_args('meson.has_external_property', str)
+ def has_external_property_method(self, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+ prop_name = args[0]
+ for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+ return prop_name in self.interpreter.environment.properties[for_machine]
+
+ @FeatureNew('add_devenv', '0.58.0')
+ @noKwargs
+ @typed_pos_args('add_devenv', (str, list, dict, EnvironmentVariablesObject))
+ def add_devenv_method(self, args: T.Tuple[T.Union[str, list, dict, EnvironmentVariablesObject]], kwargs: T.Dict[str, T.Any]) -> None:
+ env = args[0]
+ if isinstance(env, (str, list, dict)):
+ env = EnvironmentVariablesObject(env)
+ self.build.devenv.append(env.vars)
diff --git a/meson/mesonbuild/interpreterbase/__init__.py b/meson/mesonbuild/interpreterbase/__init__.py
new file mode 100644
index 000000000..8e45cdb8f
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/__init__.py
@@ -0,0 +1,122 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__all__ = [
+ 'InterpreterObject',
+ 'MesonInterpreterObject',
+ 'ObjectHolder',
+ 'RangeHolder',
+ 'MesonVersionString',
+ 'MutableInterpreterObject',
+
+ 'Disabler',
+ 'is_disabled',
+
+ 'InterpreterException',
+ 'InvalidCode',
+ 'InvalidArguments',
+ 'SubdirDoneRequest',
+ 'ContinueRequest',
+ 'BreakRequest',
+
+ 'check_stringlist',
+ 'default_resolve_key',
+ 'flatten',
+ 'resolve_second_level_holders',
+
+ 'noPosargs',
+ 'builtinMethodNoKwargs',
+ 'noKwargs',
+ 'stringArgs',
+ 'noArgsFlattening',
+ 'noSecondLevelHolderResolving',
+ 'permissive_unholder_return',
+ 'disablerIfNotFound',
+ 'permittedKwargs',
+ 'typed_pos_args',
+ 'ContainerTypeInfo',
+ 'KwargInfo',
+ 'typed_kwargs',
+ 'FeatureCheckBase',
+ 'FeatureNew',
+ 'FeatureDeprecated',
+ 'FeatureNewKwargs',
+ 'FeatureDeprecatedKwargs',
+
+ 'InterpreterBase',
+
+ 'TV_fw_var',
+ 'TV_fw_args',
+ 'TV_fw_kwargs',
+ 'TV_func',
+ 'TYPE_elementary',
+ 'TYPE_var',
+ 'TYPE_nvar',
+ 'TYPE_kwargs',
+ 'TYPE_nkwargs',
+ 'TYPE_key_resolver',
+]
+
+from .baseobjects import (
+ InterpreterObject,
+ MesonInterpreterObject,
+ ObjectHolder,
+ RangeHolder,
+ MutableInterpreterObject,
+
+ TV_fw_var,
+ TV_fw_args,
+ TV_fw_kwargs,
+ TV_func,
+ TYPE_elementary,
+ TYPE_var,
+ TYPE_nvar,
+ TYPE_kwargs,
+ TYPE_nkwargs,
+ TYPE_key_resolver,
+)
+
+from .decorators import (
+ noPosargs,
+ builtinMethodNoKwargs,
+ noKwargs,
+ stringArgs,
+ noArgsFlattening,
+ noSecondLevelHolderResolving,
+ permissive_unholder_return,
+ disablerIfNotFound,
+ permittedKwargs,
+ typed_pos_args,
+ ContainerTypeInfo,
+ KwargInfo,
+ typed_kwargs,
+ FeatureCheckBase,
+ FeatureNew,
+ FeatureDeprecated,
+ FeatureNewKwargs,
+ FeatureDeprecatedKwargs,
+)
+
+from .exceptions import (
+ InterpreterException,
+ InvalidCode,
+ InvalidArguments,
+ SubdirDoneRequest,
+ ContinueRequest,
+ BreakRequest,
+)
+
+from .disabler import Disabler, is_disabled
+from .helpers import check_stringlist, default_resolve_key, flatten, resolve_second_level_holders
+from .interpreterbase import MesonVersionString, InterpreterBase
diff --git a/meson/mesonbuild/interpreterbase/_unholder.py b/meson/mesonbuild/interpreterbase/_unholder.py
new file mode 100644
index 000000000..10c7cfc8f
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/_unholder.py
@@ -0,0 +1,39 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .baseobjects import InterpreterObject, MesonInterpreterObject, ObjectHolder, TYPE_var
+from .exceptions import InvalidArguments
+from ..mesonlib import HoldableObject, MesonBugException
+
+import typing as T
+
+def _unholder(obj: T.Union[TYPE_var, InterpreterObject], *, permissive: bool = False) -> TYPE_var:
+ if isinstance(obj, (int, bool, str)):
+ return obj
+ elif isinstance(obj, list):
+ return [_unholder(x, permissive=permissive) for x in obj]
+ elif isinstance(obj, dict):
+ return {k: _unholder(v, permissive=permissive) for k, v in obj.items()}
+ elif isinstance(obj, ObjectHolder):
+ assert isinstance(obj.held_object, HoldableObject)
+ return obj.held_object
+ elif isinstance(obj, MesonInterpreterObject):
+ return obj
+ elif isinstance(obj, HoldableObject) and permissive:
+ return obj
+ elif isinstance(obj, HoldableObject):
+ raise MesonBugException(f'Argument {obj} of type {type(obj).__name__} is not held by an ObjectHolder.')
+ elif isinstance(obj, InterpreterObject):
+ raise InvalidArguments(f'Argument {obj} of type {type(obj).__name__} cannot be passed to a method or function')
+ raise MesonBugException(f'Unknown object {obj} of type {type(obj).__name__} in the parameters.')
diff --git a/meson/mesonbuild/interpreterbase/baseobjects.py b/meson/mesonbuild/interpreterbase/baseobjects.py
new file mode 100644
index 000000000..8b1293ca2
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/baseobjects.py
@@ -0,0 +1,96 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mparser
+from .exceptions import InvalidCode
+from .helpers import flatten, resolve_second_level_holders
+from ..mesonlib import HoldableObject
+
+import typing as T
+
+if T.TYPE_CHECKING:
+ # Object holders need the actual interpreter
+ from ..interpreter import Interpreter
+
+TV_fw_var = T.Union[str, int, bool, list, dict, 'InterpreterObject']
+TV_fw_args = T.List[T.Union[mparser.BaseNode, TV_fw_var]]
+TV_fw_kwargs = T.Dict[str, T.Union[mparser.BaseNode, TV_fw_var]]
+
+TV_func = T.TypeVar('TV_func', bound=T.Callable[..., T.Any])
+
+TYPE_elementary = T.Union[str, int, bool, T.List[T.Any], T.Dict[str, T.Any]]
+TYPE_var = T.Union[TYPE_elementary, HoldableObject, 'MesonInterpreterObject']
+TYPE_nvar = T.Union[TYPE_var, mparser.BaseNode]
+TYPE_kwargs = T.Dict[str, TYPE_var]
+TYPE_nkwargs = T.Dict[str, TYPE_nvar]
+TYPE_key_resolver = T.Callable[[mparser.BaseNode], str]
+
+class InterpreterObject:
+ def __init__(self, *, subproject: T.Optional[str] = None) -> None:
+ self.methods: T.Dict[
+ str,
+ T.Callable[[T.List[TYPE_var], TYPE_kwargs], TYPE_var]
+ ] = {}
+ # Current node set during a method call. This can be used as location
+ # when printing a warning message during a method call.
+ self.current_node: T.Optional[mparser.BaseNode] = None
+ self.subproject: str = subproject or ''
+
+ def method_call(
+ self,
+ method_name: str,
+ args: T.List[TYPE_var],
+ kwargs: TYPE_kwargs
+ ) -> TYPE_var:
+ if method_name in self.methods:
+ method = self.methods[method_name]
+ if not getattr(method, 'no-args-flattening', False):
+ args = flatten(args)
+ if not getattr(method, 'no-second-level-holder-flattening', False):
+ args, kwargs = resolve_second_level_holders(args, kwargs)
+ return method(args, kwargs)
+ raise InvalidCode(f'Unknown method "{method_name}" in object {self} of type {type(self).__name__}.')
+
+class MesonInterpreterObject(InterpreterObject):
+ ''' All non-elementary objects and non-object-holders should be derived from this '''
+
+class MutableInterpreterObject:
+ ''' Dummy class to mark the object type as mutable '''
+
+InterpreterObjectTypeVar = T.TypeVar('InterpreterObjectTypeVar', bound=HoldableObject)
+
+class ObjectHolder(InterpreterObject, T.Generic[InterpreterObjectTypeVar]):
+ def __init__(self, obj: InterpreterObjectTypeVar, interpreter: 'Interpreter') -> None:
+ super().__init__(subproject=interpreter.subproject)
+ assert isinstance(obj, HoldableObject), f'This is a bug: Trying to hold object of type `{type(obj).__name__}` that is not a `HoldableObject`'
+ self.held_object = obj
+ self.interpreter = interpreter
+ self.env = self.interpreter.environment
+
+ def __repr__(self) -> str:
+ return f'<[{type(self).__name__}] holds [{type(self.held_object).__name__}]: {self.held_object!r}>'
+
+class RangeHolder(MesonInterpreterObject):
+ def __init__(self, start: int, stop: int, step: int, *, subproject: str) -> None:
+ super().__init__(subproject=subproject)
+ self.range = range(start, stop, step)
+
+ def __iter__(self) -> T.Iterator[int]:
+ return iter(self.range)
+
+ def __getitem__(self, key: int) -> int:
+ return self.range[key]
+
+ def __len__(self) -> int:
+ return len(self.range)
diff --git a/meson/mesonbuild/interpreterbase/decorators.py b/meson/mesonbuild/interpreterbase/decorators.py
new file mode 100644
index 000000000..eabc6d864
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/decorators.py
@@ -0,0 +1,650 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mesonlib, mlog
+from .baseobjects import TV_func, TYPE_var
+from .disabler import Disabler
+from .exceptions import InterpreterException, InvalidArguments
+from .helpers import check_stringlist, get_callee_args
+from ._unholder import _unholder
+
+from functools import wraps
+import abc
+import itertools
+import typing as T
+
+def noPosargs(f: TV_func) -> TV_func:
+ @wraps(f)
+ def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+ args = get_callee_args(wrapped_args)[2]
+ if args:
+ raise InvalidArguments('Function does not take positional arguments.')
+ return f(*wrapped_args, **wrapped_kwargs)
+ return T.cast(TV_func, wrapped)
+
+def builtinMethodNoKwargs(f: TV_func) -> TV_func:
+ @wraps(f)
+ def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+ node = wrapped_args[0].current_node
+ method_name = wrapped_args[2]
+ kwargs = wrapped_args[4]
+ if kwargs:
+ mlog.warning(f'Method {method_name!r} does not take keyword arguments.',
+ 'This will become a hard error in the future',
+ location=node)
+ return f(*wrapped_args, **wrapped_kwargs)
+ return T.cast(TV_func, wrapped)
+
+def noKwargs(f: TV_func) -> TV_func:
+ @wraps(f)
+ def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+ kwargs = get_callee_args(wrapped_args)[3]
+ if kwargs:
+ raise InvalidArguments('Function does not take keyword arguments.')
+ return f(*wrapped_args, **wrapped_kwargs)
+ return T.cast(TV_func, wrapped)
+
+def stringArgs(f: TV_func) -> TV_func:
+ @wraps(f)
+ def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+ args = get_callee_args(wrapped_args)[2]
+ assert isinstance(args, list)
+ check_stringlist(args)
+ return f(*wrapped_args, **wrapped_kwargs)
+ return T.cast(TV_func, wrapped)
+
+def noArgsFlattening(f: TV_func) -> TV_func:
+ setattr(f, 'no-args-flattening', True) # noqa: B010
+ return f
+
+def noSecondLevelHolderResolving(f: TV_func) -> TV_func:
+ setattr(f, 'no-second-level-holder-flattening', True) # noqa: B010
+ return f
+
+def permissive_unholder_return(f: TV_func) -> T.Callable[..., TYPE_var]:
+ @wraps(f)
+ def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+ res = f(*wrapped_args, **wrapped_kwargs)
+ return _unholder(res, permissive=True)
+ return T.cast(T.Callable[..., TYPE_var], wrapped)
+
+def disablerIfNotFound(f: TV_func) -> TV_func:
+ @wraps(f)
+ def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+ kwargs = get_callee_args(wrapped_args)[3]
+ disabler = kwargs.pop('disabler', False)
+ ret = f(*wrapped_args, **wrapped_kwargs)
+ if disabler and not ret.found():
+ return Disabler()
+ return ret
+ return T.cast(TV_func, wrapped)
+
+class permittedKwargs:
+
+ def __init__(self, permitted: T.Set[str]):
+ self.permitted = permitted # type: T.Set[str]
+
+ def __call__(self, f: TV_func) -> TV_func:
+ @wraps(f)
+ def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+ s, node, args, kwargs, _ = get_callee_args(wrapped_args)
+ for k in kwargs:
+ if k not in self.permitted:
+ mlog.warning(f'Passed invalid keyword argument "{k}".', location=node)
+ mlog.warning('This will become a hard error in the future.')
+ return f(*wrapped_args, **wrapped_kwargs)
+ return T.cast(TV_func, wrapped)
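+
+ # A hedged usage sketch (illustrative kwarg names):
+ #
+ # @permittedKwargs({'native', 'required'})
+ # def some_func(self, node, args, kwargs):
+ # ...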
+
+
+def typed_pos_args(name: str, *types: T.Union[T.Type, T.Tuple[T.Type, ...]],
+ varargs: T.Optional[T.Union[T.Type, T.Tuple[T.Type, ...]]] = None,
+ optargs: T.Optional[T.List[T.Union[T.Type, T.Tuple[T.Type, ...]]]] = None,
+ min_varargs: int = 0, max_varargs: int = 0) -> T.Callable[..., T.Any]:
+ """Decorator that types type checking of positional arguments.
+
+ This supports two different models of optional arguments: the first is the
+ variadic argument model. Variadic arguments are a possibly bounded,
+ possibly unbounded number of arguments of the same type (unions are
+ supported). The second is the standard default value model; in this case
+ a number of optional arguments may be provided, but they are still
+ ordered, and they may have different types.
+
+ This function does not support mixing variadic and default arguments.
+
+ :name: The name of the decorated function (as displayed in error messages)
+ :varargs: The type(s) of any variadic arguments the function takes. If
+ None the function takes no variadic args
+ :min_varargs: the minimum number of variadic arguments taken
+ :max_varargs: the maximum number of variadic arguments taken. 0 means unlimited
+ :optargs: The types of any optional argument parameters taken. If None
+ then no optional parameters are taken.
+
+ Some examples of usage below:
+ >>> @typed_pos_args('mod.func', str, (str, int))
+ ... def func(self, state: ModuleState, args: T.Tuple[str, T.Union[str, int]], kwargs: T.Dict[str, T.Any]) -> T.Any:
+ ... pass
+
+ >>> @typed_pos_args('method', str, varargs=str)
+ ... def method(self, node: BaseNode, args: T.Tuple[str, T.List[str]], kwargs: T.Dict[str, T.Any]) -> T.Any:
+ ... pass
+
+ >>> @typed_pos_args('method', varargs=str, min_varargs=1)
+ ... def method(self, node: BaseNode, args: T.Tuple[T.List[str]], kwargs: T.Dict[str, T.Any]) -> T.Any:
+ ... pass
+
+ >>> @typed_pos_args('method', str, optargs=[(str, int), str])
+ ... def method(self, node: BaseNode, args: T.Tuple[str, T.Optional[T.Union[str, int]], T.Optional[str]], kwargs: T.Dict[str, T.Any]) -> T.Any:
+ ... pass
+
+ When should you choose `typed_pos_args('name', varargs=str,
+ min_varargs=1)` vs `typed_pos_args('name', str, varargs=str)`?
+
+ The answer has to do with the semantics of the function: if all of the
+ inputs are the same type (such as with `files()`), then the former is
+ correct, as all of the arguments are string names of files. If the first
+ argument is something else, then it should be separated.
+ """
+ def inner(f: TV_func) -> TV_func:
+
+ @wraps(f)
+ def wrapper(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+ args = get_callee_args(wrapped_args)[2]
+
+ # These are implementation programming errors, end users should never see them.
+ assert isinstance(args, list), args
+ assert max_varargs >= 0, 'max_varargs cannot be negative'
+ assert min_varargs >= 0, 'min_varargs cannot be negative'
+ assert optargs is None or varargs is None, \
+ 'varargs and optargs not supported together as this would be ambiguous'
+
+ num_args = len(args)
+ num_types = len(types)
+ a_types = types
+
+ if varargs:
+ min_args = num_types + min_varargs
+ max_args = num_types + max_varargs
+ if max_varargs == 0 and num_args < min_args:
+ raise InvalidArguments(f'{name} takes at least {min_args} arguments, but got {num_args}.')
+ elif max_varargs != 0 and (num_args < min_args or num_args > max_args):
+ raise InvalidArguments(f'{name} takes between {min_args} and {max_args} arguments, but got {num_args}.')
+ elif optargs:
+ if num_args < num_types:
+ raise InvalidArguments(f'{name} takes at least {num_types} arguments, but got {num_args}.')
+ elif num_args > num_types + len(optargs):
+ raise InvalidArguments(f'{name} takes at most {num_types + len(optargs)} arguments, but got {num_args}.')
+ # Extend the expected types with the optional argument types that were actually provided
+ if num_args > num_types:
+ diff = num_args - num_types
+ a_types = tuple(list(types) + list(optargs[:diff]))
+ elif num_args != num_types:
+ raise InvalidArguments(f'{name} takes exactly {num_types} arguments, but got {num_args}.')
+
+ for i, (arg, type_) in enumerate(itertools.zip_longest(args, a_types, fillvalue=varargs), start=1):
+ if not isinstance(arg, type_):
+ if isinstance(type_, tuple):
+ shouldbe = 'one of: {}'.format(", ".join(f'"{t.__name__}"' for t in type_))
+ else:
+ shouldbe = f'"{type_.__name__}"'
+ raise InvalidArguments(f'{name} argument {i} was of type "{type(arg).__name__}" but should have been {shouldbe}')
+
+ # Ensure that we're actually passing a tuple.
+ # Depending on what kind of function we're calling the length of
+ # wrapped_args can vary.
+ nargs = list(wrapped_args)
+ i = nargs.index(args)
+ if varargs:
+ # if we have varargs we need to split them into a separate
+ # tuple, as python's typing doesn't understand tuples with
+ # fixed elements and variadic elements, only one or the other.
+ # so in that case we need T.Tuple[int, str, float, T.Tuple[str, ...]]
+ pos = args[:len(types)]
+ var = list(args[len(types):])
+ pos.append(var)
+ nargs[i] = tuple(pos)
+ elif optargs:
+ if num_args < num_types + len(optargs):
+ diff = num_types + len(optargs) - num_args
+ nargs[i] = tuple(list(args) + [None] * diff)
+ else:
+ nargs[i] = args
+ else:
+ nargs[i] = tuple(args)
+ return f(*nargs, **wrapped_kwargs)
+
+ return T.cast(TV_func, wrapper)
+ return inner
+
+
+class ContainerTypeInfo:
+
+ """Container information for keyword arguments.
+
+ For keyword arguments that are containers (list or dict), this class encodes
+ that information.
+
+ :param container: the type of container
+ :param contains: the types the container holds
+ :param pairs: if the container is supposed to be of even length.
+ This is mainly used for interfaces that predate the addition of dictionaries, and use
+ `[key, value, key2, value2]` format.
+ :param allow_empty: Whether this container is allowed to be empty
+ There are some cases where containers not only must be passed, but must
+ not be empty, and other cases where an empty container is allowed.
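+
+ A hedged usage sketch (illustrative values):
+ >>> info = ContainerTypeInfo(list, str, allow_empty=False)
+ >>> info.check(['a', 'b']) # returns None, the value is valid
+ >>> info.check([]) # returns an error message string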
+ """
+
+ def __init__(self, container: T.Type, contains: T.Union[T.Type, T.Tuple[T.Type, ...]], *,
+ pairs: bool = False, allow_empty: bool = True):
+ self.container = container
+ self.contains = contains
+ self.pairs = pairs
+ self.allow_empty = allow_empty
+
+ def check(self, value: T.Any) -> T.Optional[str]:
+ """Check that a value is valid.
+
+ :param value: A value to check
+ :return: If there is an error then a string message, otherwise None
+ """
+ if not isinstance(value, self.container):
+ return f'container type was "{type(value).__name__}", but should have been "{self.container.__name__}"'
+ iter_ = iter(value.values()) if isinstance(value, dict) else iter(value)
+ for each in iter_:
+ if not isinstance(each, self.contains):
+ if isinstance(self.contains, tuple):
+ shouldbe = 'one of: {}'.format(", ".join(f'"{t.__name__}"' for t in self.contains))
+ else:
+ shouldbe = f'"{self.contains.__name__}"'
+ return f'contained a value of type "{type(each).__name__}" but should have been {shouldbe}'
+ if self.pairs and len(value) % 2 != 0:
+ return 'container should be of even length, but is not'
+ if not value and not self.allow_empty:
+ return 'container is empty, but not allowed to be'
+ return None
+
+
+_T = T.TypeVar('_T')
+
+class _NULL_T:
+ """Special null type for evolution, this is an implementation detail."""
+
+
+_NULL = _NULL_T()
+
+class KwargInfo(T.Generic[_T]):
+
+ """A description of a keyword argument to a meson function
+
+ This is used to describe a value to the :func:typed_kwargs function.
+
+ :param name: the name of the parameter
+ :param types: A type or tuple of types that are allowed, or a :class:ContainerType
+ :param required: Whether this is a required keyword argument. Defaults to False
+ :param listify: If true, then the argument will be listified before being
+ checked. This is useful for cases where the Meson DSL allows a scalar or
+ a container, but internally we only want to work with containers
+ :param default: A default value to use if this isn't set. Defaults to None.
+ This may be safely set to a mutable type, as long as that type does not
+ itself contain mutable types; typed_kwargs will copy the default
+ :param since: Meson version in which this argument was added. Defaults to None
+ :param deprecated: Meson version in which this argument was deprecated. Defaults to None
+ :param validator: A callable that does additional validation. This is mainly
+ intended for cases where a string is expected, but only a few specific
+ values are accepted. Must return None if the input is valid, or a
+ message if the input is invalid
+ :param convertor: A callable that converts the raw input value into a
+ different type. This is intended for cases such as the Meson DSL using a
+ string, but the implementation using an Enum. This should not do
+ validation, just conversion.
+ :param deprecated_values: a dictionary mapping a value to the version of
+ Meson in which it was deprecated.
+ :param since_values: a dictionary mapping a value to the version of Meson
+ in which it was added.
+ :param not_set_warning: A warning message that is logged if the kwarg is not
+ set by the user.
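+
+ A hedged usage sketch (hypothetical argument name and version):
+ >>> KwargInfo('install', bool, default=False, since='0.50.0')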
+ """
+
+ def __init__(self, name: str, types: T.Union[T.Type[_T], T.Tuple[T.Type[_T], ...], ContainerTypeInfo],
+ *, required: bool = False, listify: bool = False,
+ default: T.Optional[_T] = None,
+ since: T.Optional[str] = None,
+ since_values: T.Optional[T.Dict[str, str]] = None,
+ deprecated: T.Optional[str] = None,
+ deprecated_values: T.Optional[T.Dict[str, str]] = None,
+ validator: T.Optional[T.Callable[[_T], T.Optional[str]]] = None,
+ convertor: T.Optional[T.Callable[[_T], TYPE_var]] = None,
+ not_set_warning: T.Optional[str] = None):
+ self.name = name
+ self.types = types
+ self.required = required
+ self.listify = listify
+ self.default = default
+ self.since_values = since_values
+ self.since = since
+ self.deprecated = deprecated
+ self.deprecated_values = deprecated_values
+ self.validator = validator
+ self.convertor = convertor
+ self.not_set_warning = not_set_warning
+
+ def evolve(self, *,
+ required: T.Union[bool, _NULL_T] = _NULL,
+ listify: T.Union[bool, _NULL_T] = _NULL,
+ default: T.Union[_T, None, _NULL_T] = _NULL,
+ since: T.Union[str, None, _NULL_T] = _NULL,
+ since_values: T.Union[T.Dict[str, str], None, _NULL_T] = _NULL,
+ deprecated: T.Union[str, None, _NULL_T] = _NULL,
+ deprecated_values: T.Union[T.Dict[str, str], None, _NULL_T] = _NULL,
+ validator: T.Union[T.Callable[[_T], T.Optional[str]], None, _NULL_T] = _NULL,
+ convertor: T.Union[T.Callable[[_T], TYPE_var], None, _NULL_T] = _NULL) -> 'KwargInfo':
+ """Create a shallow copy of this KwargInfo, with modifications.
+
+ This allows us to reuse a shared kwarg that implements complex logic,
+ while accommodating slight differences in usage, such as being added to
+ different functions in different versions of Meson.
+
+ The use of the _NULL special value here allows us to pass None, which has
+ meaning in many of these cases. _NULL itself is never stored, always
+ being replaced by either the copy in self, or the provided new version.
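+
+ A hedged usage sketch (hypothetical argument name and version):
+ >>> base = KwargInfo('install', bool, default=False)
+ >>> newer = base.evolve(since='0.60.0')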
+ """
+ return type(self)(
+ self.name,
+ self.types,
+ listify=listify if not isinstance(listify, _NULL_T) else self.listify,
+ required=required if not isinstance(required, _NULL_T) else self.required,
+ default=default if not isinstance(default, _NULL_T) else self.default,
+ since=since if not isinstance(since, _NULL_T) else self.since,
+ since_values=since_values if not isinstance(since_values, _NULL_T) else self.since_values,
+ deprecated=deprecated if not isinstance(deprecated, _NULL_T) else self.deprecated,
+ deprecated_values=deprecated_values if not isinstance(deprecated_values, _NULL_T) else self.deprecated_values,
+ validator=validator if not isinstance(validator, _NULL_T) else self.validator,
+ convertor=convertor if not isinstance(convertor, _NULL_T) else self.convertor,
+ )
+
+
+def typed_kwargs(name: str, *types: KwargInfo) -> T.Callable[..., T.Any]:
+ """Decorator for type checking keyword arguments.
+
+ Used to wrap a Meson DSL implementation function, checking the types of
+ its keyword arguments and various other properties. For non-required
+ values it sets the value to a default, which means the value will always
+ be provided.
+
+ If the type is a :class:ContainerTypeInfo, then the default value will be
+ passed as an argument to the container initializer, making a shallow copy.
+
+ :param name: the name of the function, including the object it's attached to
+ (if applicable)
+ :param *types: KwargInfo entries for each keyword argument.
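+
+ A hedged usage sketch (hypothetical function name and kwarg):
+ >>> @typed_kwargs('mod.func', KwargInfo('install', bool, default=False))
+ ... def func(self, node, args, kwargs) -> T.Any:
+ ... assert isinstance(kwargs['install'], bool)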
+ """
+ def inner(f: TV_func) -> TV_func:
+
+ @wraps(f)
+ def wrapper(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+ kwargs, subproject = get_callee_args(wrapped_args, want_subproject=True)[3:5]
+
+ all_names = {t.name for t in types}
+ unknowns = set(kwargs).difference(all_names)
+ if unknowns:
+ # Warn about unknown arguments, delete them and continue. This
+ # keeps current behavior
+ ustr = ', '.join([f'"{u}"' for u in sorted(unknowns)])
+ mlog.warning(f'{name} got unknown keyword arguments {ustr}')
+ for u in unknowns:
+ del kwargs[u]
+
+ for info in types:
+ value = kwargs.get(info.name)
+ if value is not None:
+ if info.since:
+ feature_name = info.name + ' arg in ' + name
+ FeatureNew.single_use(feature_name, info.since, subproject)
+ if info.deprecated:
+ feature_name = info.name + ' arg in ' + name
+ FeatureDeprecated.single_use(feature_name, info.deprecated, subproject)
+ if info.listify:
+ kwargs[info.name] = value = mesonlib.listify(value)
+ if isinstance(info.types, ContainerTypeInfo):
+ msg = info.types.check(value)
+ if msg is not None:
+ raise InvalidArguments(f'{name} keyword argument "{info.name}" {msg}')
+ else:
+ if not isinstance(value, info.types):
+ if isinstance(info.types, tuple):
+ shouldbe = 'one of: {}'.format(", ".join(f'"{t.__name__}"' for t in info.types))
+ else:
+ shouldbe = f'"{info.types.__name__}"'
+ raise InvalidArguments(f'{name} keyword argument "{info.name}"" was of type "{type(value).__name__}" but should have been {shouldbe}')
+
+ if info.validator is not None:
+ msg = info.validator(value)
+ if msg is not None:
+ raise InvalidArguments(f'{name} keyword argument "{info.name}" {msg}')
+
+ warn: bool
+ if info.deprecated_values is not None:
+ for n, version in info.deprecated_values.items():
+ if isinstance(value, (dict, list)):
+ warn = n in value
+ else:
+ warn = n == value
+
+ if warn:
+ FeatureDeprecated.single_use(f'"{name}" keyword argument "{info.name}" value "{n}"', version, subproject)
+
+ if info.since_values is not None:
+ for n, version in info.since_values.items():
+ if isinstance(value, (dict, list)):
+ warn = n in value
+ else:
+ warn = n == value
+
+ if warn:
+ FeatureNew.single_use(f'"{name}" keyword argument "{info.name}" value "{n}"', version, subproject)
+
+ elif info.required:
+ raise InvalidArguments(f'{name} is missing required keyword argument "{info.name}"')
+ else:
+ # Set the value to the default, ensuring all kwargs are present.
+ # This simplifies both the type checking and the usage.
+ # Create a shallow copy of the container (and do a type
+ # conversion if necessary). This allows mutable types to
+ # be used safely as default values
+ if isinstance(info.types, ContainerTypeInfo):
+ kwargs[info.name] = info.types.container(info.default)
+ else:
+ kwargs[info.name] = info.default
+ if info.not_set_warning:
+ mlog.warning(info.not_set_warning)
+
+ if info.convertor:
+ kwargs[info.name] = info.convertor(kwargs[info.name])
+
+ return f(*wrapped_args, **wrapped_kwargs)
+ return T.cast(TV_func, wrapper)
+ return inner
+
+
+class FeatureCheckBase(metaclass=abc.ABCMeta):
+ "Base class for feature version checks"
+
+ # In python 3.6 we can just forward declare this, but in 3.5 we can't
+ # This will be overwritten by the subclasses by necessity
+ feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
+
+ def __init__(self, feature_name: str, version: str, extra_message: T.Optional[str] = None):
+ self.feature_name = feature_name # type: str
+ self.feature_version = version # type: str
+ self.extra_message = extra_message or '' # type: str
+
+ @staticmethod
+ def get_target_version(subproject: str) -> str:
+ # Don't do any checks if project() has not been parsed yet
+ if subproject not in mesonlib.project_meson_versions:
+ return ''
+ return mesonlib.project_meson_versions[subproject]
+
+ @staticmethod
+ @abc.abstractmethod
+ def check_version(target_version: str, feature_version: str) -> bool:
+ pass
+
+ def use(self, subproject: str) -> None:
+ tv = self.get_target_version(subproject)
+ # No target version
+ if tv == '':
+ return
+ # Target version is new enough
+ if self.check_version(tv, self.feature_version):
+ return
+ # Feature is too new for target version, register it
+ if subproject not in self.feature_registry:
+ self.feature_registry[subproject] = {self.feature_version: set()}
+ register = self.feature_registry[subproject]
+ if self.feature_version not in register:
+ register[self.feature_version] = set()
+ if self.feature_name in register[self.feature_version]:
+ # Don't warn about the same feature multiple times
+ # FIXME: This is needed to prevent duplicate warnings, but also
+ # means we won't warn about a feature used in multiple places.
+ return
+ register[self.feature_version].add(self.feature_name)
+ self.log_usage_warning(tv)
+
+ @classmethod
+ def report(cls, subproject: str) -> None:
+ if subproject not in cls.feature_registry:
+ return
+ warning_str = cls.get_warning_str_prefix(cls.get_target_version(subproject))
+ fv = cls.feature_registry[subproject]
+ for version in sorted(fv.keys()):
+ warning_str += '\n * {}: {}'.format(version, fv[version])
+ mlog.warning(warning_str)
+
+ def log_usage_warning(self, tv: str) -> None:
+ raise InterpreterException('log_usage_warning not implemented')
+
+ @staticmethod
+ def get_warning_str_prefix(tv: str) -> str:
+ raise InterpreterException('get_warning_str_prefix not implemented')
+
+ def __call__(self, f: TV_func) -> TV_func:
+ @wraps(f)
+ def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+ subproject = get_callee_args(wrapped_args, want_subproject=True)[4]
+ if subproject is None:
+ raise AssertionError(f'{wrapped_args!r}')
+ self.use(subproject)
+ return f(*wrapped_args, **wrapped_kwargs)
+ return T.cast(TV_func, wrapped)
+
+ @classmethod
+ def single_use(cls, feature_name: str, version: str, subproject: str,
+ extra_message: T.Optional[str] = None) -> None:
+ """Oneline version that instantiates and calls use()."""
+ cls(feature_name, version, extra_message).use(subproject)
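+
+ # A hedged usage sketch, mirroring the real call made for str.replace
+ # later in this patch:
+ # FeatureNew.single_use('str.replace', '0.58.0', subproject)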
+
+
+class FeatureNew(FeatureCheckBase):
+ """Checks for new features"""
+
+ # Class variable, shared across all instances
+ #
+ # Format: {subproject: {feature_version: set(feature_names)}}
+ feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
+
+ @staticmethod
+ def check_version(target_version: str, feature_version: str) -> bool:
+ return mesonlib.version_compare_condition_with_min(target_version, feature_version)
+
+ @staticmethod
+ def get_warning_str_prefix(tv: str) -> str:
+ return f'Project specifies a minimum meson_version \'{tv}\' but uses features which were added in newer versions:'
+
+ def log_usage_warning(self, tv: str) -> None:
+ args = [
+ 'Project targeting', f"'{tv}'",
+ 'but tried to use feature introduced in',
+ f"'{self.feature_version}':",
+ f'{self.feature_name}.',
+ ]
+ if self.extra_message:
+ args.append(self.extra_message)
+ mlog.warning(*args)
+
+class FeatureDeprecated(FeatureCheckBase):
+ """Checks for deprecated features"""
+
+ # Class variable, shared across all instances
+ #
+ # Format: {subproject: {feature_version: set(feature_names)}}
+ feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
+
+ @staticmethod
+ def check_version(target_version: str, feature_version: str) -> bool:
+ # For deprecation checks we need to return the inverse of FeatureNew checks
+ return not mesonlib.version_compare_condition_with_min(target_version, feature_version)
+
+ @staticmethod
+ def get_warning_str_prefix(tv: str) -> str:
+ return 'Deprecated features used:'
+
+ def log_usage_warning(self, tv: str) -> None:
+ args = [
+ 'Project targeting', f"'{tv}'",
+ 'but tried to use feature deprecated since',
+ f"'{self.feature_version}':",
+ f'{self.feature_name}.',
+ ]
+ if self.extra_message:
+ args.append(self.extra_message)
+ mlog.warning(*args)
+
+
+class FeatureCheckKwargsBase(metaclass=abc.ABCMeta):
+
+ @property
+ @abc.abstractmethod
+ def feature_check_class(self) -> T.Type[FeatureCheckBase]:
+ pass
+
+ def __init__(self, feature_name: str, feature_version: str,
+ kwargs: T.List[str], extra_message: T.Optional[str] = None):
+ self.feature_name = feature_name
+ self.feature_version = feature_version
+ self.kwargs = kwargs
+ self.extra_message = extra_message
+
+ def __call__(self, f: TV_func) -> TV_func:
+ @wraps(f)
+ def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+ kwargs, subproject = get_callee_args(wrapped_args, want_subproject=True)[3:5]
+ if subproject is None:
+ raise AssertionError(f'{wrapped_args!r}')
+ for arg in self.kwargs:
+ if arg not in kwargs:
+ continue
+ name = arg + ' arg in ' + self.feature_name
+ self.feature_check_class.single_use(
+ name, self.feature_version, subproject, self.extra_message)
+ return f(*wrapped_args, **wrapped_kwargs)
+ return T.cast(TV_func, wrapped)
+
+class FeatureNewKwargs(FeatureCheckKwargsBase):
+ feature_check_class = FeatureNew
+
+class FeatureDeprecatedKwargs(FeatureCheckKwargsBase):
+ feature_check_class = FeatureDeprecated
diff --git a/meson/mesonbuild/interpreterbase/disabler.py b/meson/mesonbuild/interpreterbase/disabler.py
new file mode 100644
index 000000000..81f526466
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/disabler.py
@@ -0,0 +1,42 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .baseobjects import MesonInterpreterObject
+import typing as T
+
+class Disabler(MesonInterpreterObject):
+ def __init__(self) -> None:
+ super().__init__()
+ self.methods.update({'found': self.found_method})
+
+ def found_method(self, args: T.Sequence[T.Any], kwargs: T.Dict[str, T.Any]) -> bool:
+ return False
+
+def _is_arg_disabled(arg: T.Any) -> bool:
+ if isinstance(arg, Disabler):
+ return True
+ if isinstance(arg, list):
+ for i in arg:
+ if _is_arg_disabled(i):
+ return True
+ return False
+
+def is_disabled(args: T.Sequence[T.Any], kwargs: T.Dict[str, T.Any]) -> bool:
+ for i in args:
+ if _is_arg_disabled(i):
+ return True
+ for i in kwargs.values():
+ if _is_arg_disabled(i):
+ return True
+ return False
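+
+# A hedged usage sketch:
+# is_disabled([1, Disabler()], {}) -> True
+# is_disabled(['x'], {'k': 'v'}) -> False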
diff --git a/meson/mesonbuild/interpreterbase/exceptions.py b/meson/mesonbuild/interpreterbase/exceptions.py
new file mode 100644
index 000000000..cdbe0fb3b
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/exceptions.py
@@ -0,0 +1,33 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..mesonlib import MesonException
+
+class InterpreterException(MesonException):
+ pass
+
+class InvalidCode(InterpreterException):
+ pass
+
+class InvalidArguments(InterpreterException):
+ pass
+
+class SubdirDoneRequest(BaseException):
+ pass
+
+class ContinueRequest(BaseException):
+ pass
+
+class BreakRequest(BaseException):
+ pass
diff --git a/meson/mesonbuild/interpreterbase/helpers.py b/meson/mesonbuild/interpreterbase/helpers.py
new file mode 100644
index 000000000..235257795
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/helpers.py
@@ -0,0 +1,118 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mesonlib, mparser, mlog
+from .exceptions import InvalidArguments, InterpreterException
+
+import collections.abc
+import typing as T
+
+if T.TYPE_CHECKING:
+ from .baseobjects import TYPE_var, TYPE_kwargs
+
+def flatten(args: T.Union['TYPE_var', T.List['TYPE_var']]) -> T.List['TYPE_var']:
+ if isinstance(args, mparser.StringNode):
+ assert isinstance(args.value, str)
+ return [args.value]
+ if not isinstance(args, collections.abc.Sequence):
+ return [args]
+ result: T.List['TYPE_var'] = []
+ for a in args:
+ if isinstance(a, list):
+ rest = flatten(a)
+ result = result + rest
+ elif isinstance(a, mparser.StringNode):
+ result.append(a.value)
+ else:
+ result.append(a)
+ return result
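+
+# A hedged sketch: flatten(['a', ['b', ['c']]]) returns ['a', 'b', 'c'].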
+
+def resolve_second_level_holders(args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> T.Tuple[T.List['TYPE_var'], 'TYPE_kwargs']:
+ def resolver(arg: 'TYPE_var') -> 'TYPE_var':
+ if isinstance(arg, list):
+ return [resolver(x) for x in arg]
+ if isinstance(arg, dict):
+ return {k: resolver(v) for k, v in arg.items()}
+ if isinstance(arg, mesonlib.SecondLevelHolder):
+ return arg.get_default_object()
+ return arg
+ return [resolver(x) for x in args], {k: resolver(v) for k, v in kwargs.items()}
+
+def check_stringlist(a: T.Any, msg: str = 'Arguments must be strings.') -> None:
+ if not isinstance(a, list):
+ mlog.debug('Not a list:', str(a))
+ raise InvalidArguments('Argument not a list.')
+ if not all(isinstance(s, str) for s in a):
+ mlog.debug('Element not a string:', str(a))
+ raise InvalidArguments(msg)
+
+def default_resolve_key(key: mparser.BaseNode) -> str:
+ if not isinstance(key, mparser.IdNode):
+ raise InterpreterException('Invalid kwargs format.')
+ return key.value
+
+def get_callee_args(wrapped_args: T.Sequence[T.Any], want_subproject: bool = False) -> T.Tuple[T.Any, mparser.BaseNode, T.List['TYPE_var'], 'TYPE_kwargs', T.Optional[str]]:
+ s = wrapped_args[0]
+ n = len(wrapped_args)
+ # An AssertionError is raised below if the call shape is not recognized
+ subproject = None # type: T.Optional[str]
+ if want_subproject and n == 2:
+ if hasattr(s, 'subproject'):
+ # Interpreter base types have 2 args: self, node
+ node = wrapped_args[1]
+ # args and kwargs are inside the node
+ args = None
+ kwargs = None
+ subproject = s.subproject
+ elif hasattr(wrapped_args[1], 'subproject'):
+ # Module objects have 2 args: self, interpreter
+ node = wrapped_args[1].current_node
+ # args and kwargs are inside the node
+ args = None
+ kwargs = None
+ subproject = wrapped_args[1].subproject
+ else:
+ raise AssertionError(f'Unknown args: {wrapped_args!r}')
+ elif n == 3:
+ # Methods on objects (*Holder, MesonMain, etc) have 3 args: self, args, kwargs
+ node = s.current_node
+ args = wrapped_args[1]
+ kwargs = wrapped_args[2]
+ if want_subproject:
+ if hasattr(s, 'subproject'):
+ subproject = s.subproject
+ elif hasattr(s, 'interpreter'):
+ subproject = s.interpreter.subproject
+ elif n == 4:
+ # Meson functions have 4 args: self, node, args, kwargs
+ # Module functions have 4 args: self, state, args, kwargs
+ from .interpreterbase import InterpreterBase # TODO: refactor to avoid this import
+ if isinstance(s, InterpreterBase):
+ node = wrapped_args[1]
+ else:
+ node = wrapped_args[1].current_node
+ args = wrapped_args[2]
+ kwargs = wrapped_args[3]
+ if want_subproject:
+ if isinstance(s, InterpreterBase):
+ subproject = s.subproject
+ else:
+ subproject = wrapped_args[1].subproject
+ else:
+ raise AssertionError(f'Unknown args: {wrapped_args!r}')
+ # Sometimes interpreter methods are called internally with None instead of
+ # empty list/dict
+ args = args if args is not None else []
+ kwargs = kwargs if kwargs is not None else {}
+ return s, node, args, kwargs, subproject
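+
+# A hedged sketch of the return shape: a holder method invoked as
+# (self, args, kwargs) yields (self, self.current_node, args, kwargs,
+# subproject or None).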
diff --git a/meson/mesonbuild/interpreterbase/interpreterbase.py b/meson/mesonbuild/interpreterbase/interpreterbase.py
new file mode 100644
index 000000000..115e24be0
--- /dev/null
+++ b/meson/mesonbuild/interpreterbase/interpreterbase.py
@@ -0,0 +1,959 @@
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+
+from .. import mparser, mesonlib, mlog
+from .. import environment
+
+from .baseobjects import (
+ InterpreterObject,
+ MesonInterpreterObject,
+ MutableInterpreterObject,
+ InterpreterObjectTypeVar,
+ ObjectHolder,
+ RangeHolder,
+
+ TYPE_elementary,
+ TYPE_var,
+ TYPE_kwargs,
+)
+
+from .exceptions import (
+ InterpreterException,
+ InvalidCode,
+ InvalidArguments,
+ SubdirDoneRequest,
+ ContinueRequest,
+ BreakRequest
+)
+
+from .decorators import FeatureNew, builtinMethodNoKwargs
+from .disabler import Disabler, is_disabled
+from .helpers import check_stringlist, default_resolve_key, flatten, resolve_second_level_holders
+from ._unholder import _unholder
+
+import os, copy, re
+import typing as T
+
+if T.TYPE_CHECKING:
+ from ..interpreter import Interpreter
+
+HolderMapType = T.Dict[
+ T.Type[mesonlib.HoldableObject],
+ # For some reason, this has to be a callable and can't just be ObjectHolder[InterpreterObjectTypeVar]
+ T.Callable[[InterpreterObjectTypeVar, 'Interpreter'], ObjectHolder[InterpreterObjectTypeVar]]
+]
+
+FunctionType = T.Dict[
+ str,
+ T.Callable[[mparser.BaseNode, T.List[TYPE_var], T.Dict[str, TYPE_var]], TYPE_var]
+]
+
+class MesonVersionString(str):
+ pass
+
+class InterpreterBase:
+ elementary_types = (int, str, bool, list)
+
+ def __init__(self, source_root: str, subdir: str, subproject: str):
+ self.source_root = source_root
+ self.funcs: FunctionType = {}
+ self.builtin: T.Dict[str, InterpreterObject] = {}
+ # Holder maps store a mapping from a HoldableObject to its ObjectHolder class
+ self.holder_map: HolderMapType = {}
+ self.bound_holder_map: HolderMapType = {}
+ self.subdir = subdir
+ self.root_subdir = subdir
+ self.subproject = subproject
+ # TODO: This should actually be more strict: T.Union[TYPE_elementary, InterpreterObject]
+ self.variables: T.Dict[str, T.Union[TYPE_var, InterpreterObject]] = {}
+ self.argument_depth = 0
+ self.current_lineno = -1
+ # Current node set during a function call. This can be used as location
+ # when printing a warning message during a method call.
+ self.current_node = None # type: mparser.BaseNode
+ # This is set to `version_string` when this statement is evaluated:
+ # meson.version().compare_version(version_string)
+ # If it was part of an if-clause, it is used to temporarily override the
+ # current meson version target within that if-block.
+ self.tmp_meson_version = None # type: T.Optional[str]
+
+ def load_root_meson_file(self) -> None:
+ mesonfile = os.path.join(self.source_root, self.subdir, environment.build_filename)
+ if not os.path.isfile(mesonfile):
+ raise InvalidArguments('Missing Meson file in %s' % mesonfile)
+ with open(mesonfile, encoding='utf-8') as mf:
+ code = mf.read()
+ if code.isspace():
+ raise InvalidCode('Builder file is empty.')
+ assert isinstance(code, str)
+ try:
+ self.ast = mparser.Parser(code, mesonfile).parse()
+ except mesonlib.MesonException as me:
+ me.file = mesonfile
+ raise me
+
+ def join_path_strings(self, args: T.Sequence[str]) -> str:
+ return os.path.join(*args).replace('\\', '/')
+
+ def parse_project(self) -> None:
+ """
+ Parses project() and initializes languages, compilers etc. Do this
+ early because we need this before we parse the rest of the AST.
+ """
+ self.evaluate_codeblock(self.ast, end=1)
+
+ def sanity_check_ast(self) -> None:
+ if not isinstance(self.ast, mparser.CodeBlockNode):
+ raise InvalidCode('AST is of invalid type. Possibly a bug in the parser.')
+ if not self.ast.lines:
+ raise InvalidCode('No statements in code.')
+ first = self.ast.lines[0]
+ if not isinstance(first, mparser.FunctionNode) or first.func_name != 'project':
+ raise InvalidCode('First statement must be a call to project')
+
+ def run(self) -> None:
+ # Evaluate everything after the first line, which is project() because
+ # we already parsed that in self.parse_project()
+ try:
+ self.evaluate_codeblock(self.ast, start=1)
+ except SubdirDoneRequest:
+ pass
+
+ def evaluate_codeblock(self, node: mparser.CodeBlockNode, start: int = 0, end: T.Optional[int] = None) -> None:
+ if node is None:
+ return
+ if not isinstance(node, mparser.CodeBlockNode):
+ e = InvalidCode('Tried to execute a non-codeblock. Possibly a bug in the parser.')
+ e.lineno = node.lineno
+ e.colno = node.colno
+ raise e
+ statements = node.lines[start:end]
+ i = 0
+ while i < len(statements):
+ cur = statements[i]
+ try:
+ self.current_lineno = cur.lineno
+ self.evaluate_statement(cur)
+ except Exception as e:
+ if getattr(e, 'lineno', None) is None:
+ # We are doing the equivalent to setattr here and mypy does not like it
+ e.lineno = cur.lineno # type: ignore
+ e.colno = cur.colno # type: ignore
+ e.file = os.path.join(self.source_root, self.subdir, environment.build_filename) # type: ignore
+ raise e
+ i += 1 # In THE FUTURE jump over blocks and stuff.
+
+ def evaluate_statement(self, cur: mparser.BaseNode) -> T.Optional[T.Union[TYPE_var, InterpreterObject]]:
+ self.current_node = cur
+ if isinstance(cur, mparser.FunctionNode):
+ return self.function_call(cur)
+ elif isinstance(cur, mparser.AssignmentNode):
+ self.assignment(cur)
+ elif isinstance(cur, mparser.MethodNode):
+ return self.method_call(cur)
+ elif isinstance(cur, mparser.StringNode):
+ return cur.value
+ elif isinstance(cur, mparser.BooleanNode):
+ return cur.value
+ elif isinstance(cur, mparser.IfClauseNode):
+ return self.evaluate_if(cur)
+ elif isinstance(cur, mparser.IdNode):
+ return self.get_variable(cur.value)
+ elif isinstance(cur, mparser.ComparisonNode):
+ return self.evaluate_comparison(cur)
+ elif isinstance(cur, mparser.ArrayNode):
+ return self.evaluate_arraystatement(cur)
+ elif isinstance(cur, mparser.DictNode):
+ return self.evaluate_dictstatement(cur)
+ elif isinstance(cur, mparser.NumberNode):
+ return cur.value
+ elif isinstance(cur, mparser.AndNode):
+ return self.evaluate_andstatement(cur)
+ elif isinstance(cur, mparser.OrNode):
+ return self.evaluate_orstatement(cur)
+ elif isinstance(cur, mparser.NotNode):
+ return self.evaluate_notstatement(cur)
+ elif isinstance(cur, mparser.UMinusNode):
+ return self.evaluate_uminusstatement(cur)
+ elif isinstance(cur, mparser.ArithmeticNode):
+ return self.evaluate_arithmeticstatement(cur)
+ elif isinstance(cur, mparser.ForeachClauseNode):
+ self.evaluate_foreach(cur)
+ elif isinstance(cur, mparser.PlusAssignmentNode):
+ self.evaluate_plusassign(cur)
+ elif isinstance(cur, mparser.IndexNode):
+ return self.evaluate_indexing(cur)
+ elif isinstance(cur, mparser.TernaryNode):
+ return self.evaluate_ternary(cur)
+ elif isinstance(cur, mparser.FormatStringNode):
+ return self.evaluate_fstring(cur)
+ elif isinstance(cur, mparser.ContinueNode):
+ raise ContinueRequest()
+ elif isinstance(cur, mparser.BreakNode):
+ raise BreakRequest()
+ elif isinstance(cur, self.elementary_types):
+ return cur
+ else:
+ raise InvalidCode("Unknown statement.")
+ return None
+
+ def evaluate_arraystatement(self, cur: mparser.ArrayNode) -> T.List[T.Union[TYPE_var, InterpreterObject]]:
+ (arguments, kwargs) = self.reduce_arguments(cur.args)
+ if len(kwargs) > 0:
+ raise InvalidCode('Keyword arguments are invalid in array construction.')
+ return arguments
+
+ @FeatureNew('dict', '0.47.0')
+ def evaluate_dictstatement(self, cur: mparser.DictNode) -> T.Union[TYPE_var, InterpreterObject]:
+ def resolve_key(key: mparser.BaseNode) -> str:
+ if not isinstance(key, mparser.StringNode):
+ FeatureNew.single_use('Dictionary entry using non literal key', '0.53.0', self.subproject)
+ str_key = self.evaluate_statement(key)
+ if not isinstance(str_key, str):
+ raise InvalidArguments('Key must be a string')
+ return str_key
+ arguments, kwargs = self.reduce_arguments(cur.args, key_resolver=resolve_key, duplicate_key_error='Duplicate dictionary key: {}')
+ assert not arguments
+ return kwargs
+
+ def evaluate_notstatement(self, cur: mparser.NotNode) -> T.Union[bool, Disabler]:
+ v = self.evaluate_statement(cur.value)
+ if isinstance(v, Disabler):
+ return v
+ if not isinstance(v, bool):
+ raise InterpreterException('Argument to "not" is not a boolean.')
+ return not v
+
+ def evaluate_if(self, node: mparser.IfClauseNode) -> T.Optional[Disabler]:
+ assert isinstance(node, mparser.IfClauseNode)
+ for i in node.ifs:
+ # Reset self.tmp_meson_version to know if it gets set during this
+ # statement evaluation.
+ self.tmp_meson_version = None
+ result = self.evaluate_statement(i.condition)
+ if isinstance(result, Disabler):
+ return result
+ if not isinstance(result, bool):
+ raise InvalidCode(f'If clause {result!r} does not evaluate to true or false.')
+ if result:
+ prev_meson_version = mesonlib.project_meson_versions[self.subproject]
+ if self.tmp_meson_version:
+ mesonlib.project_meson_versions[self.subproject] = self.tmp_meson_version
+ try:
+ self.evaluate_codeblock(i.block)
+ finally:
+ mesonlib.project_meson_versions[self.subproject] = prev_meson_version
+ return None
+ if not isinstance(node.elseblock, mparser.EmptyNode):
+ self.evaluate_codeblock(node.elseblock)
+ return None
+
+ def validate_comparison_types(self, val1: T.Any, val2: T.Any) -> bool:
+ return type(val1) == type(val2)
+
+ def evaluate_in(self, val1: T.Any, val2: T.Any) -> bool:
+ if not isinstance(val1, (str, int, float, mesonlib.HoldableObject)):
+ raise InvalidArguments('lvalue of "in" operator must be a string, integer, float, or object')
+ if not isinstance(val2, (list, dict)):
+ raise InvalidArguments('rvalue of "in" operator must be an array or a dict')
+ return val1 in val2
+
+ def evaluate_comparison(self, node: mparser.ComparisonNode) -> T.Union[bool, Disabler]:
+ val1 = self.evaluate_statement(node.left)
+ if isinstance(val1, Disabler):
+ return val1
+ val2 = self.evaluate_statement(node.right)
+ if isinstance(val2, Disabler):
+ return val2
+ # Do not compare the ObjectHolders but the actual held objects
+ val1 = _unholder(val1)
+ val2 = _unholder(val2)
+ if node.ctype == 'in':
+ return self.evaluate_in(val1, val2)
+ elif node.ctype == 'notin':
+ return not self.evaluate_in(val1, val2)
+ valid = self.validate_comparison_types(val1, val2)
+ # Ordering comparisons of different types isn't allowed since PR #1810
+ # (0.41.0). Since PR #2884 we also warn about equality comparisons of
+ # different types, which will one day become an error.
+ if not valid and (node.ctype == '==' or node.ctype == '!='):
+ mlog.warning('''Trying to compare values of different types ({}, {}) using {}.
+The result of this is undefined and will become a hard error in a future Meson release.'''
+ .format(type(val1).__name__, type(val2).__name__, node.ctype), location=node)
+ if node.ctype == '==':
+ return val1 == val2
+ elif node.ctype == '!=':
+ return val1 != val2
+ elif not valid:
+ raise InterpreterException(
+ 'Values of different types ({}, {}) cannot be compared using {}.'.format(type(val1).__name__,
+ type(val2).__name__,
+ node.ctype))
+ elif not isinstance(val1, self.elementary_types):
+ raise InterpreterException('{} can only be compared for equality.'.format(getattr(node.left, 'value', '<ERROR>')))
+ elif not isinstance(val2, self.elementary_types):
+ raise InterpreterException('{} can only be compared for equality.'.format(getattr(node.right, 'value', '<ERROR>')))
+ # Use type: ignore because mypy will complain that we are comparing two Unions,
+ # but we actually guarantee earlier that both types are the same
+ elif node.ctype == '<':
+ return val1 < val2 # type: ignore
+ elif node.ctype == '<=':
+ return val1 <= val2 # type: ignore
+ elif node.ctype == '>':
+ return val1 > val2 # type: ignore
+ elif node.ctype == '>=':
+ return val1 >= val2 # type: ignore
+ else:
+ raise InvalidCode('You broke my compare eval.')
+
+ def evaluate_andstatement(self, cur: mparser.AndNode) -> T.Union[bool, Disabler]:
+ l = self.evaluate_statement(cur.left)
+ if isinstance(l, Disabler):
+ return l
+ if not isinstance(l, bool):
+ raise InterpreterException('First argument to "and" is not a boolean.')
+ if not l:
+ return False
+ r = self.evaluate_statement(cur.right)
+ if isinstance(r, Disabler):
+ return r
+ if not isinstance(r, bool):
+ raise InterpreterException('Second argument to "and" is not a boolean.')
+ return r
+
+ def evaluate_orstatement(self, cur: mparser.OrNode) -> T.Union[bool, Disabler]:
+ l = self.evaluate_statement(cur.left)
+ if isinstance(l, Disabler):
+ return l
+ if not isinstance(l, bool):
+ raise InterpreterException('First argument to "or" is not a boolean.')
+ if l:
+ return True
+ r = self.evaluate_statement(cur.right)
+ if isinstance(r, Disabler):
+ return r
+ if not isinstance(r, bool):
+ raise InterpreterException('Second argument to "or" is not a boolean.')
+ return r
+
+ def evaluate_uminusstatement(self, cur: mparser.UMinusNode) -> T.Union[int, Disabler]:
+ v = self.evaluate_statement(cur.value)
+ if isinstance(v, Disabler):
+ return v
+ if not isinstance(v, int):
+ raise InterpreterException('Argument to negation is not an integer.')
+ return -v
+
+ @FeatureNew('/ with string arguments', '0.49.0')
+ def evaluate_path_join(self, l: str, r: str) -> str:
+ if not isinstance(l, str):
+ raise InvalidCode('The division operator can only append to a string.')
+ if not isinstance(r, str):
+ raise InvalidCode('The division operator can only append a string.')
+ return self.join_path_strings((l, r))
+
+ def evaluate_division(self, l: T.Any, r: T.Any) -> T.Union[int, str]:
+ if isinstance(l, str) or isinstance(r, str):
+ return self.evaluate_path_join(l, r)
+ if isinstance(l, int) and isinstance(r, int):
+ if r == 0:
+ raise InvalidCode('Division by zero.')
+ return l // r
+ raise InvalidCode('Division works only with strings or integers.')
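+
+ # A hedged sketch of the semantics above: 'foo' / 'bar' evaluates to
+ # 'foo/bar' (a path join), while 7 / 2 evaluates to 3 (floor division).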
+
+ def evaluate_arithmeticstatement(self, cur: mparser.ArithmeticNode) -> T.Union[int, str, dict, list, Disabler]:
+ l = self.evaluate_statement(cur.left)
+ if isinstance(l, Disabler):
+ return l
+ r = self.evaluate_statement(cur.right)
+ if isinstance(r, Disabler):
+ return r
+
+ if cur.operation == 'add':
+ if isinstance(l, dict) and isinstance(r, dict):
+ return {**l, **r}
+ try:
+ # MyPy error due to handling two Unions (we are catching all exceptions anyway)
+ return l + r # type: ignore
+ except Exception as e:
+ raise InvalidCode('Invalid use of addition: ' + str(e))
+ elif cur.operation == 'sub':
+ if not isinstance(l, int) or not isinstance(r, int):
+ raise InvalidCode('Subtraction works only with integers.')
+ return l - r
+ elif cur.operation == 'mul':
+ if not isinstance(l, int) or not isinstance(r, int):
+ raise InvalidCode('Multiplication works only with integers.')
+ return l * r
+ elif cur.operation == 'div':
+ return self.evaluate_division(l, r)
+ elif cur.operation == 'mod':
+ if not isinstance(l, int) or not isinstance(r, int):
+ raise InvalidCode('Modulo works only with integers.')
+ return l % r
+ else:
+ raise InvalidCode('You broke me.')
+
+ def evaluate_ternary(self, node: mparser.TernaryNode) -> T.Union[TYPE_var, InterpreterObject]:
+ assert isinstance(node, mparser.TernaryNode)
+ result = self.evaluate_statement(node.condition)
+ if isinstance(result, Disabler):
+ return result
+ if not isinstance(result, bool):
+ raise InterpreterException('Ternary condition is not boolean.')
+ if result:
+ return self.evaluate_statement(node.trueblock)
+ else:
+ return self.evaluate_statement(node.falseblock)
+
+ @FeatureNew('format strings', '0.58.0')
+ def evaluate_fstring(self, node: mparser.FormatStringNode) -> TYPE_var:
+ assert isinstance(node, mparser.FormatStringNode)
+
+ def replace(match: T.Match[str]) -> str:
+ var = str(match.group(1))
+ try:
+ val = self.variables[var]
+ if not isinstance(val, (str, int, float, bool)):
+ raise InvalidCode(f'Identifier "{var}" does not name a formattable variable ' +
+ '(has to be an integer, a string, a floating point number or a boolean).')
+
+ return str(val)
+ except KeyError:
+ raise InvalidCode(f'Identifier "{var}" does not name a variable.')
+
+ return re.sub(r'@([_a-zA-Z][_0-9a-zA-Z]*)@', replace, node.value)
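+
+ # A hedged sketch: with self.variables = {'name': 'meson'}, evaluating
+ # the format string f'hello @name@' yields 'hello meson'.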
+
+ def evaluate_foreach(self, node: mparser.ForeachClauseNode) -> None:
+ assert isinstance(node, mparser.ForeachClauseNode)
+ items = self.evaluate_statement(node.items)
+
+ if isinstance(items, (list, RangeHolder)):
+ if len(node.varnames) != 1:
+ raise InvalidArguments('Foreach on array does not unpack')
+ varname = node.varnames[0]
+ for item in items:
+ self.set_variable(varname, item)
+ try:
+ self.evaluate_codeblock(node.block)
+ except ContinueRequest:
+ continue
+ except BreakRequest:
+ break
+ elif isinstance(items, dict):
+ if len(node.varnames) != 2:
+ raise InvalidArguments('Foreach on dict unpacks key and value')
+ for key, value in sorted(items.items()):
+ self.set_variable(node.varnames[0], key)
+ self.set_variable(node.varnames[1], value)
+ try:
+ self.evaluate_codeblock(node.block)
+ except ContinueRequest:
+ continue
+ except BreakRequest:
+ break
+ else:
+ raise InvalidArguments('Items of foreach loop must be an array or a dict')
+
+ def evaluate_plusassign(self, node: mparser.PlusAssignmentNode) -> None:
+ assert isinstance(node, mparser.PlusAssignmentNode)
+ varname = node.var_name
+ addition = self.evaluate_statement(node.value)
+
+ # Remember that all variables are immutable. We must always create a
+ # full new variable and then assign it.
+ old_variable = self.get_variable(varname)
+ new_value = None # type: T.Union[str, int, float, bool, dict, list]
+ if isinstance(old_variable, str):
+ if not isinstance(addition, str):
+ raise InvalidArguments('The += operator requires a string on the right hand side if the variable on the left is a string')
+ new_value = old_variable + addition
+ elif isinstance(old_variable, int):
+ if not isinstance(addition, int):
+ raise InvalidArguments('The += operator requires an int on the right hand side if the variable on the left is an int')
+ new_value = old_variable + addition
+ elif isinstance(old_variable, list):
+ if isinstance(addition, list):
+ new_value = old_variable + addition
+ else:
+ new_value = old_variable + [addition]
+ elif isinstance(old_variable, dict):
+ if not isinstance(addition, dict):
+ raise InvalidArguments('The += operator requires a dict on the right hand side if the variable on the left is a dict')
+ new_value = {**old_variable, **addition}
+ # Add other data types here.
+ else:
+ raise InvalidArguments('The += operator currently only works with arrays, dicts, strings or ints')
+ self.set_variable(varname, new_value)
+
+ def evaluate_indexing(self, node: mparser.IndexNode) -> T.Union[TYPE_elementary, InterpreterObject]:
+ assert isinstance(node, mparser.IndexNode)
+ iobject = self.evaluate_statement(node.iobject)
+ if isinstance(iobject, Disabler):
+ return iobject
+ if not hasattr(iobject, '__getitem__'):
+ raise InterpreterException(
+ 'Tried to index an object that doesn\'t support indexing.')
+ index = self.evaluate_statement(node.index)
+
+ if isinstance(iobject, dict):
+ if not isinstance(index, str):
+ raise InterpreterException('Key is not a string')
+ try:
+ # The cast is required because we don't have recursive types...
+ return T.cast(T.Union[TYPE_elementary, InterpreterObject], iobject[index])
+ except KeyError:
+ raise InterpreterException('Key %s is not in dict' % index)
+ else:
+ if not isinstance(index, int):
+ raise InterpreterException('Index value is not an integer.')
+ try:
+ # Ignore the MyPy error, since we don't know all indexable types here
+ # and we handle non indexable types with an exception
+ # TODO maybe find a better solution
+ res = iobject[index] # type: ignore
+ # Only holderify if we are dealing with `InterpreterObject`, since raw
+ # lists already store ObjectHolders
+ if isinstance(iobject, InterpreterObject):
+ return self._holderify(res)
+ else:
+ return res
+ except IndexError:
+ # We are already checking for the existence of __getitem__, so this should be safe
+ raise InterpreterException('Index %d out of bounds of array of size %d.' % (index, len(iobject))) # type: ignore
+
+ def function_call(self, node: mparser.FunctionNode) -> T.Optional[T.Union[TYPE_elementary, InterpreterObject]]:
+ func_name = node.func_name
+ (h_posargs, h_kwargs) = self.reduce_arguments(node.args)
+ (posargs, kwargs) = self._unholder_args(h_posargs, h_kwargs)
+ if is_disabled(posargs, kwargs) and func_name not in {'get_variable', 'set_variable', 'is_disabler'}:
+ return Disabler()
+ if func_name in self.funcs:
+ func = self.funcs[func_name]
+ func_args = posargs
+ if not getattr(func, 'no-args-flattening', False):
+ func_args = flatten(posargs)
+ if not getattr(func, 'no-second-level-holder-flattening', False):
+ func_args, kwargs = resolve_second_level_holders(func_args, kwargs)
+ res = func(node, func_args, kwargs)
+ return self._holderify(res)
+ else:
+ self.unknown_function_called(func_name)
+ return None
+
+ def method_call(self, node: mparser.MethodNode) -> T.Optional[T.Union[TYPE_var, InterpreterObject]]:
+ invokable = node.source_object
+ obj: T.Union[TYPE_var, InterpreterObject]
+ if isinstance(invokable, mparser.IdNode):
+ object_name = invokable.value
+ obj = self.get_variable(object_name)
+ else:
+ # Assumed fallback name so the error paths below never hit an unbound variable
+ object_name = '<expression>'
+ obj = self.evaluate_statement(invokable)
+ method_name = node.name
+ (h_args, h_kwargs) = self.reduce_arguments(node.args)
+ (args, kwargs) = self._unholder_args(h_args, h_kwargs)
+ if is_disabled(args, kwargs):
+ return Disabler()
+ if isinstance(obj, str):
+ return self.string_method_call(obj, method_name, args, kwargs)
+ if isinstance(obj, bool):
+ return self.bool_method_call(obj, method_name, args, kwargs)
+ if isinstance(obj, int):
+ return self.int_method_call(obj, method_name, args, kwargs)
+ if isinstance(obj, list):
+ return self.array_method_call(obj, method_name, args, kwargs)
+ if isinstance(obj, dict):
+ return self.dict_method_call(obj, method_name, args, kwargs)
+ if not isinstance(obj, InterpreterObject):
+ raise InvalidArguments('Variable "%s" is not callable.' % object_name)
+ # Special case. This is the only thing you can do with a disabler
+ # object. Every other use immediately returns the disabler object.
+ if isinstance(obj, Disabler):
+ if method_name == 'found':
+ return False
+ else:
+ return Disabler()
+ # TODO: InterpreterBase **really** shouldn't be in charge of checking this
+ if method_name == 'extract_objects':
+ if not isinstance(obj, ObjectHolder):
+ raise InvalidArguments(f'Invalid operation "extract_objects" on variable "{object_name}" of type {type(obj).__name__}')
+ self.validate_extraction(obj.held_object)
+ obj.current_node = node
+ return self._holderify(obj.method_call(method_name, args, kwargs))
+
+ def _holderify(self, res: T.Union[TYPE_var, InterpreterObject, None]) -> T.Union[TYPE_elementary, InterpreterObject]:
+ if res is None:
+ return None
+ if isinstance(res, (int, bool, str)):
+ return res
+ elif isinstance(res, list):
+ return [self._holderify(x) for x in res]
+ elif isinstance(res, dict):
+ return {k: self._holderify(v) for k, v in res.items()}
+ elif isinstance(res, mesonlib.HoldableObject):
+ # Always check for an exact match first.
+ cls = self.holder_map.get(type(res), None)
+ if cls is not None:
+ # Casts to Interpreter are required here since an assertion would
+ # not work for the `ast` module.
+ return cls(res, T.cast('Interpreter', self))
+ # Try the bound holder types next.
+ for typ, cls in self.bound_holder_map.items():
+ if isinstance(res, typ):
+ return cls(res, T.cast('Interpreter', self))
+ raise mesonlib.MesonBugException(f'Object {res} of type {type(res).__name__} is neither in self.holder_map nor self.bound_holder_map.')
+ elif isinstance(res, ObjectHolder):
+ raise mesonlib.MesonBugException(f'Returned object {res} of type {type(res).__name__} is an object holder.')
+ elif isinstance(res, MesonInterpreterObject):
+ return res
+ raise mesonlib.MesonBugException(f'Unknown returned object {res} of type {type(res).__name__} in the parameters.')
+
+ def _unholder_args(self,
+ args: T.List[T.Union[TYPE_var, InterpreterObject]],
+ kwargs: T.Dict[str, T.Union[TYPE_var, InterpreterObject]]) -> T.Tuple[T.List[TYPE_var], TYPE_kwargs]:
+ return [_unholder(x) for x in args], {k: _unholder(v) for k, v in kwargs.items()}
+
+ @builtinMethodNoKwargs
+ def bool_method_call(self, obj: bool, method_name: str, posargs: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[str, int]:
+ if method_name == 'to_string':
+ if not posargs:
+ if obj:
+ return 'true'
+ else:
+ return 'false'
+ elif len(posargs) == 2 and isinstance(posargs[0], str) and isinstance(posargs[1], str):
+ if obj:
+ return posargs[0]
+ else:
+ return posargs[1]
+ else:
+ raise InterpreterException('bool.to_string() must have either no arguments or exactly two string arguments that signify what values to return for true and false.')
+ elif method_name == 'to_int':
+ if obj:
+ return 1
+ else:
+ return 0
+ else:
+ raise InterpreterException('Unknown method "%s" for a boolean.' % method_name)
+
+ @builtinMethodNoKwargs
+ def int_method_call(self, obj: int, method_name: str, posargs: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[str, bool]:
+ if method_name == 'is_even':
+ if not posargs:
+ return obj % 2 == 0
+ else:
+ raise InterpreterException('int.is_even() must have no arguments.')
+ elif method_name == 'is_odd':
+ if not posargs:
+ return obj % 2 != 0
+ else:
+ raise InterpreterException('int.is_odd() must have no arguments.')
+ elif method_name == 'to_string':
+ if not posargs:
+ return str(obj)
+ else:
+ raise InterpreterException('int.to_string() must have no arguments.')
+ else:
+ raise InterpreterException('Unknown method "%s" for an integer.' % method_name)
+
+ @staticmethod
+ def _get_one_string_posarg(posargs: T.List[TYPE_var], method_name: str) -> T.Optional[str]:
+ if len(posargs) > 1:
+ raise InterpreterException(f'{method_name}() must have zero or one argument')
+ elif len(posargs) == 1:
+ s = posargs[0]
+ if not isinstance(s, str):
+ raise InterpreterException(f'{method_name}() argument must be a string')
+ return s
+ return None
+
+ @builtinMethodNoKwargs
+ def string_method_call(self, obj: str, method_name: str, posargs: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[str, int, bool, T.List[str]]:
+ if method_name == 'strip':
+ s1 = self._get_one_string_posarg(posargs, 'strip')
+ if s1 is not None:
+ return obj.strip(s1)
+ return obj.strip()
+ elif method_name == 'format':
+ return self.format_string(obj, posargs)
+ elif method_name == 'to_upper':
+ return obj.upper()
+ elif method_name == 'to_lower':
+ return obj.lower()
+ elif method_name == 'underscorify':
+ return re.sub(r'[^a-zA-Z0-9]', '_', obj)
+ elif method_name == 'split':
+ s2 = self._get_one_string_posarg(posargs, 'split')
+ if s2 is not None:
+ return obj.split(s2)
+ return obj.split()
+ elif method_name in {'startswith', 'contains', 'endswith'}:
+ s3 = posargs[0]
+ if not isinstance(s3, str):
+ raise InterpreterException('Argument must be a string.')
+ if method_name == 'startswith':
+ return obj.startswith(s3)
+ elif method_name == 'contains':
+ return obj.find(s3) >= 0
+ return obj.endswith(s3)
+ elif method_name == 'to_int':
+ try:
+ return int(obj)
+ except Exception:
+ raise InterpreterException(f'String {obj!r} cannot be converted to int')
+ elif method_name == 'join':
+ if len(posargs) != 1:
+ raise InterpreterException('join() takes exactly one argument.')
+ strlist = posargs[0]
+ check_stringlist(strlist)
+ assert isinstance(strlist, list) # Required for mypy
+ return obj.join(strlist)
+ elif method_name == 'version_compare':
+ if len(posargs) != 1:
+ raise InterpreterException('version_compare() takes exactly one argument.')
+ cmpr = posargs[0]
+ if not isinstance(cmpr, str):
+ raise InterpreterException('version_compare() argument must be a string.')
+ if isinstance(obj, MesonVersionString):
+ self.tmp_meson_version = cmpr
+ return mesonlib.version_compare(obj, cmpr)
+ elif method_name == 'substring':
+ if len(posargs) > 2:
+ raise InterpreterException('substring() takes maximum two arguments.')
+ start = 0
+ end = len(obj)
+ if len(posargs) > 0:
+ if not isinstance(posargs[0], int):
+ raise InterpreterException('substring() argument must be an int')
+ start = posargs[0]
+ if len(posargs) > 1:
+ if not isinstance(posargs[1], int):
+ raise InterpreterException('substring() argument must be an int')
+ end = posargs[1]
+ return obj[start:end]
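+ # Note (illustrative, not in the original source): negative indices
+ # follow Python slice semantics, so 'foobar'.substring(1, -1) yields
+ # 'ooba' and 'foobar'.substring(-3) yields 'bar'.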
+ elif method_name == 'replace':
+ FeatureNew.single_use('str.replace', '0.58.0', self.subproject)
+ if len(posargs) != 2:
+ raise InterpreterException('replace() takes exactly two arguments.')
+ if not isinstance(posargs[0], str) or not isinstance(posargs[1], str):
+ raise InterpreterException('replace() requires that both arguments be strings')
+ return obj.replace(posargs[0], posargs[1])
+ raise InterpreterException('Unknown method "%s" for a string.' % method_name)
+
+ def format_string(self, templ: str, args: T.List[TYPE_var]) -> str:
+ arg_strings = []
+ for arg in args:
+ if isinstance(arg, mparser.BaseNode):
+ arg = self.evaluate_statement(arg)
+ if isinstance(arg, bool): # Python boolean is upper case.
+ arg = str(arg).lower()
+ arg_strings.append(str(arg))
+
+ def arg_replace(match: T.Match[str]) -> str:
+ idx = int(match.group(1))
+ if idx >= len(arg_strings):
+ raise InterpreterException(f'Format placeholder @{idx}@ out of range.')
+ return arg_strings[idx]
+
+ return re.sub(r'@(\d+)@', arg_replace, templ)
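+ # Illustration (hypothetical values, not from the original source):
+ # format_string('@0@ took @1@ ms', ['parse', 42]) returns
+ # 'parse took 42 ms'; booleans are lower-cased first, and an
+ # out-of-range placeholder such as @2@ here raises InterpreterException.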
+
+ def unknown_function_called(self, func_name: str) -> None:
+ raise InvalidCode('Unknown function "%s".' % func_name)
+
+ @builtinMethodNoKwargs
+ def array_method_call(self,
+ obj: T.List[T.Union[TYPE_elementary, InterpreterObject]],
+ method_name: str,
+ posargs: T.List[TYPE_var],
+ kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]:
+ if method_name == 'contains':
+ def check_contains(el: T.List[TYPE_var]) -> bool:
+ if len(posargs) != 1:
+ raise InterpreterException('Contains method takes exactly one argument.')
+ item = posargs[0]
+ for element in el:
+ if isinstance(element, list):
+ found = check_contains(element)
+ if found:
+ return True
+ if element == item:
+ return True
+ return False
+ return check_contains([_unholder(x) for x in obj])
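+ # Note (illustrative, not in the original source): the check recurses
+ # into nested lists, so ['a', ['b', 'c']].contains('c') is true even
+ # though 'c' is not a top-level element.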
+ elif method_name == 'length':
+ return len(obj)
+ elif method_name == 'get':
+ index = posargs[0]
+ fallback = None
+ if len(posargs) == 2:
+ fallback = self._holderify(posargs[1])
+ elif len(posargs) > 2:
+ m = 'Array method \'get()\' only takes two arguments: the ' \
+ 'index and an optional fallback value if the index is ' \
+ 'out of range.'
+ raise InvalidArguments(m)
+ if not isinstance(index, int):
+ raise InvalidArguments('Array index must be a number.')
+ if index < -len(obj) or index >= len(obj):
+ if fallback is None:
+ m = 'Array index {!r} is out of bounds for array of size {!r}.'
+ raise InvalidArguments(m.format(index, len(obj)))
+ if isinstance(fallback, mparser.BaseNode):
+ return self.evaluate_statement(fallback)
+ return fallback
+ return obj[index]
+ raise InterpreterException(f'Arrays do not have a method called {method_name!r}.')
+
+ @builtinMethodNoKwargs
+ def dict_method_call(self,
+ obj: T.Dict[str, T.Union[TYPE_elementary, InterpreterObject]],
+ method_name: str,
+ posargs: T.List[TYPE_var],
+ kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]:
+ if method_name in ('has_key', 'get'):
+ if method_name == 'has_key':
+ if len(posargs) != 1:
+ raise InterpreterException('has_key() takes exactly one argument.')
+ else:
+ if len(posargs) not in (1, 2):
+ raise InterpreterException('get() takes one or two arguments.')
+
+ key = posargs[0]
+ if not isinstance(key, str):
+ raise InvalidArguments('Dictionary key must be a string.')
+
+ has_key = key in obj
+
+ if method_name == 'has_key':
+ return has_key
+
+ if has_key:
+ return obj[key]
+
+ if len(posargs) == 2:
+ fallback = self._holderify(posargs[1])
+ if isinstance(fallback, mparser.BaseNode):
+ return self.evaluate_statement(fallback)
+ return fallback
+
+ raise InterpreterException(f'Key {key!r} is not in the dictionary.')
+
+ if method_name == 'keys':
+ if len(posargs) != 0:
+ raise InterpreterException('keys() takes no arguments.')
+ return sorted(obj.keys())
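+ # Note (illustrative, not in the original source): the keys come back
+ # sorted rather than in insertion order, so {'b': 1, 'a': 2}.keys()
+ # yields ['a', 'b'].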
+
+ raise InterpreterException('Dictionaries do not have a method called "%s".' % method_name)
+
+ def reduce_arguments(
+ self,
+ args: mparser.ArgumentNode,
+ key_resolver: T.Callable[[mparser.BaseNode], str] = default_resolve_key,
+ duplicate_key_error: T.Optional[str] = None,
+ ) -> T.Tuple[
+ T.List[T.Union[TYPE_var, InterpreterObject]],
+ T.Dict[str, T.Union[TYPE_var, InterpreterObject]]
+ ]:
+ assert isinstance(args, mparser.ArgumentNode)
+ if args.incorrect_order():
+ raise InvalidArguments('All keyword arguments must be after positional arguments.')
+ self.argument_depth += 1
+ reduced_pos: T.List[T.Union[TYPE_var, InterpreterObject]] = [self.evaluate_statement(arg) for arg in args.arguments]
+ reduced_kw: T.Dict[str, T.Union[TYPE_var, InterpreterObject]] = {}
+ for key, val in args.kwargs.items():
+ reduced_key = key_resolver(key)
+ assert isinstance(val, mparser.BaseNode)
+ reduced_val = self.evaluate_statement(val)
+ if duplicate_key_error and reduced_key in reduced_kw:
+ raise InvalidArguments(duplicate_key_error.format(reduced_key))
+ reduced_kw[reduced_key] = reduced_val
+ self.argument_depth -= 1
+ final_kw = self.expand_default_kwargs(reduced_kw)
+ return reduced_pos, final_kw
+
+ def expand_default_kwargs(self, kwargs: T.Dict[str, T.Union[TYPE_var, InterpreterObject]]) -> T.Dict[str, T.Union[TYPE_var, InterpreterObject]]:
+ if 'kwargs' not in kwargs:
+ return kwargs
+ to_expand = kwargs.pop('kwargs')
+ if not isinstance(to_expand, dict):
+ raise InterpreterException('Value of "kwargs" must be dictionary.')
+ if 'kwargs' in to_expand:
+ raise InterpreterException('Kwargs argument must not contain a "kwargs" entry. Points for thinking meta, though. :P')
+ for k, v in to_expand.items():
+ if k in kwargs:
+ raise InterpreterException(f'Entry "{k}" defined both as a keyword argument and in a "kwarg" entry.')
+ kwargs[k] = v
+ return kwargs
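+ # Sketch of the expansion (hypothetical input, not from the original
+ # source): {'a': 1, 'kwargs': {'b': 2}} becomes {'a': 1, 'b': 2};
+ # a key present both directly and inside 'kwargs' raises, as does a
+ # nested 'kwargs' entry inside the expanded dictionary.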
+
+ def assignment(self, node: mparser.AssignmentNode) -> None:
+ assert isinstance(node, mparser.AssignmentNode)
+ if self.argument_depth != 0:
+ raise InvalidArguments('''Tried to assign values inside an argument list.
+To specify a keyword argument, use : instead of =.''')
+ var_name = node.var_name
+ if not isinstance(var_name, str):
+ raise InvalidArguments('Tried to assign value to a non-variable.')
+ value = self.evaluate_statement(node.value)
+ if not self.is_assignable(value):
+ raise InvalidCode(f'Tried to assign the invalid value "{value}" of type {type(value).__name__} to variable.')
+ # For mutable objects we need to make a copy on assignment
+ if isinstance(value, MutableInterpreterObject):
+ value = copy.deepcopy(value)
+ self.set_variable(var_name, value)
+ return None
+
+ def set_variable(self, varname: str, variable: T.Union[TYPE_var, InterpreterObject], *, holderify: bool = False) -> None:
+ if variable is None:
+ raise InvalidCode('Can not assign None to variable.')
+ if holderify:
+ variable = self._holderify(variable)
+ else:
+ # Ensure that we are never storing a HoldableObject
+ def check(x: T.Union[TYPE_var, InterpreterObject]) -> None:
+ if isinstance(x, mesonlib.HoldableObject):
+ raise mesonlib.MesonBugException(f'set_variable in InterpreterBase called with a HoldableObject {x} of type {type(x).__name__}')
+ elif isinstance(x, list):
+ for y in x:
+ check(y)
+ elif isinstance(x, dict):
+ for v in x.values():
+ check(v)
+ check(variable)
+ if not isinstance(varname, str):
+ raise InvalidCode('First argument to set_variable must be a string.')
+ if not self.is_assignable(variable):
+ raise InvalidCode(f'Assigned value "{variable}" of type {type(variable).__name__} is not an assignable type.')
+ if re.match('[_a-zA-Z][_0-9a-zA-Z]*$', varname) is None:
+ raise InvalidCode('Invalid variable name: ' + varname)
+ if varname in self.builtin:
+ raise InvalidCode('Tried to overwrite internal variable "%s"' % varname)
+ self.variables[varname] = variable
+
+ def get_variable(self, varname: str) -> T.Union[TYPE_var, InterpreterObject]:
+ if varname in self.builtin:
+ return self.builtin[varname]
+ if varname in self.variables:
+ return self.variables[varname]
+ raise InvalidCode('Unknown variable "%s".' % varname)
+
+ def is_assignable(self, value: T.Any) -> bool:
+ return isinstance(value, (InterpreterObject, str, int, list, dict))
+
+ def validate_extraction(self, buildtarget: mesonlib.HoldableObject) -> None:
+ raise InterpreterException('validate_extraction is not implemented in this context (please file a bug)')
diff --git a/meson/mesonbuild/linkers/__init__.py b/meson/mesonbuild/linkers/__init__.py
new file mode 100644
index 000000000..9182fa1a8
--- /dev/null
+++ b/meson/mesonbuild/linkers/__init__.py
@@ -0,0 +1,126 @@
+# Copyright 2012-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .detect import (
+ defaults,
+ guess_win_linker,
+ guess_nix_linker,
+)
+from .linkers import (
+ RSPFileSyntax,
+
+ StaticLinker,
+ VisualStudioLikeLinker,
+ VisualStudioLinker,
+ IntelVisualStudioLinker,
+ ArLinker,
+ ArmarLinker,
+ DLinker,
+ CcrxLinker,
+ Xc16Linker,
+ CompCertLinker,
+ C2000Linker,
+ AIXArLinker,
+ PGIStaticLinker,
+ NvidiaHPC_StaticLinker,
+
+ DynamicLinker,
+ PosixDynamicLinkerMixin,
+ GnuLikeDynamicLinkerMixin,
+ AppleDynamicLinker,
+ GnuDynamicLinker,
+ GnuGoldDynamicLinker,
+ GnuBFDDynamicLinker,
+ LLVMDynamicLinker,
+ WASMDynamicLinker,
+ CcrxDynamicLinker,
+ Xc16DynamicLinker,
+ CompCertDynamicLinker,
+ C2000DynamicLinker,
+ ArmDynamicLinker,
+ ArmClangDynamicLinker,
+ QualcommLLVMDynamicLinker,
+ PGIDynamicLinker,
+ NvidiaHPC_DynamicLinker,
+
+ VisualStudioLikeLinkerMixin,
+ MSVCDynamicLinker,
+ ClangClDynamicLinker,
+ XilinkDynamicLinker,
+ SolarisDynamicLinker,
+ AIXDynamicLinker,
+ OptlinkDynamicLinker,
+ CudaLinker,
+
+ prepare_rpaths,
+ order_rpaths,
+ evaluate_rpath,
+)
+
+__all__ = [
+ # detect.py
+ 'defaults',
+ 'guess_win_linker',
+ 'guess_nix_linker',
+
+ # linkers.py
+ 'RSPFileSyntax',
+
+ 'StaticLinker',
+ 'VisualStudioLikeLinker',
+ 'VisualStudioLinker',
+ 'IntelVisualStudioLinker',
+ 'ArLinker',
+ 'ArmarLinker',
+ 'DLinker',
+ 'CcrxLinker',
+ 'Xc16Linker',
+ 'CompCertLinker',
+ 'C2000Linker',
+ 'AIXArLinker',
+ 'PGIStaticLinker',
+ 'NvidiaHPC_StaticLinker',
+
+ 'DynamicLinker',
+ 'PosixDynamicLinkerMixin',
+ 'GnuLikeDynamicLinkerMixin',
+ 'AppleDynamicLinker',
+ 'GnuDynamicLinker',
+ 'GnuGoldDynamicLinker',
+ 'GnuBFDDynamicLinker',
+ 'LLVMDynamicLinker',
+ 'WASMDynamicLinker',
+ 'CcrxDynamicLinker',
+ 'Xc16DynamicLinker',
+ 'CompCertDynamicLinker',
+ 'C2000DynamicLinker',
+ 'ArmDynamicLinker',
+ 'ArmClangDynamicLinker',
+ 'QualcommLLVMDynamicLinker',
+ 'PGIDynamicLinker',
+ 'NvidiaHPC_DynamicLinker',
+
+ 'VisualStudioLikeLinkerMixin',
+ 'MSVCDynamicLinker',
+ 'ClangClDynamicLinker',
+ 'XilinkDynamicLinker',
+ 'SolarisDynamicLinker',
+ 'AIXDynamicLinker',
+ 'OptlinkDynamicLinker',
+ 'CudaLinker',
+
+ 'prepare_rpaths',
+ 'order_rpaths',
+ 'evaluate_rpath',
+]
diff --git a/meson/mesonbuild/linkers/detect.py b/meson/mesonbuild/linkers/detect.py
new file mode 100644
index 000000000..43dccbbcb
--- /dev/null
+++ b/meson/mesonbuild/linkers/detect.py
@@ -0,0 +1,216 @@
+# Copyright 2012-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..mesonlib import (
+ EnvironmentException, MachineChoice, OptionKey,
+ Popen_safe, search_version
+)
+from .linkers import (
+ DynamicLinker,
+ AppleDynamicLinker,
+ GnuDynamicLinker,
+ GnuGoldDynamicLinker,
+ GnuBFDDynamicLinker,
+ LLVMDynamicLinker,
+ QualcommLLVMDynamicLinker,
+ MSVCDynamicLinker,
+ ClangClDynamicLinker,
+ SolarisDynamicLinker,
+ AIXDynamicLinker,
+ OptlinkDynamicLinker,
+)
+
+import re
+import shlex
+import typing as T
+
+if T.TYPE_CHECKING:
+ from ..environment import Environment
+ from ..compilers import Compiler
+
+defaults: T.Dict[str, T.List[str]] = {}
+defaults['static_linker'] = ['ar', 'gar']
+defaults['vs_static_linker'] = ['lib']
+defaults['clang_cl_static_linker'] = ['llvm-lib']
+defaults['cuda_static_linker'] = ['nvlink']
+defaults['gcc_static_linker'] = ['gcc-ar']
+defaults['clang_static_linker'] = ['llvm-ar']
+
+def __failed_to_detect_linker(compiler: T.List[str], args: T.List[str], stdout: str, stderr: str) -> 'T.NoReturn':
+ msg = 'Unable to detect linker for compiler "{} {}"\nstdout: {}\nstderr: {}'.format(
+ ' '.join(compiler), ' '.join(args), stdout, stderr)
+ raise EnvironmentException(msg)
+
+
+def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Type['Compiler'],
+ for_machine: MachineChoice, *,
+ use_linker_prefix: bool = True, invoked_directly: bool = True,
+ extra_args: T.Optional[T.List[str]] = None) -> 'DynamicLinker':
+ env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+
+ # Explicitly pass logo here so that we can get the version of link.exe
+ if not use_linker_prefix or comp_class.LINKER_PREFIX is None:
+ check_args = ['/logo', '--version']
+ elif isinstance(comp_class.LINKER_PREFIX, str):
+ check_args = [comp_class.LINKER_PREFIX + '/logo', comp_class.LINKER_PREFIX + '--version']
+ elif isinstance(comp_class.LINKER_PREFIX, list):
+ check_args = comp_class.LINKER_PREFIX + ['/logo'] + comp_class.LINKER_PREFIX + ['--version']
+
+ check_args += env.coredata.options[OptionKey('args', lang=comp_class.language, machine=for_machine)].value
+
+ override = [] # type: T.List[str]
+ value = env.lookup_binary_entry(for_machine, comp_class.language + '_ld')
+ if value is not None:
+ override = comp_class.use_linker_args(value[0])
+ check_args += override
+
+ if extra_args is not None:
+ check_args.extend(extra_args)
+
+ p, o, _ = Popen_safe(compiler + check_args)
+ if 'LLD' in o.split('\n')[0]:
+ if '(compatible with GNU linkers)' in o:
+ return LLVMDynamicLinker(
+ compiler, for_machine, comp_class.LINKER_PREFIX,
+ override, version=search_version(o))
+ elif not invoked_directly:
+ return ClangClDynamicLinker(
+ for_machine, override, exelist=compiler, prefix=comp_class.LINKER_PREFIX,
+ version=search_version(o), direct=False, machine=None)
+
+ if value is not None and invoked_directly:
+ compiler = value
+ # We've already handled the non-direct case above
+
+ p, o, e = Popen_safe(compiler + check_args)
+ if 'LLD' in o.split('\n')[0]:
+ return ClangClDynamicLinker(
+ for_machine, [],
+ prefix=comp_class.LINKER_PREFIX if use_linker_prefix else [],
+ exelist=compiler, version=search_version(o), direct=invoked_directly)
+ elif 'OPTLINK' in o:
+ # Optlink's stdout *may* begin with a \r character.
+ return OptlinkDynamicLinker(compiler, for_machine, version=search_version(o))
+ elif o.startswith('Microsoft') or e.startswith('Microsoft'):
+ out = o or e
+ match = re.search(r'.*(X86|X64|ARM|ARM64).*', out)
+ if match:
+ target = str(match.group(1))
+ else:
+ target = 'x86'
+
+ return MSVCDynamicLinker(
+ for_machine, [], machine=target, exelist=compiler,
+ prefix=comp_class.LINKER_PREFIX if use_linker_prefix else [],
+ version=search_version(out), direct=invoked_directly)
+ elif 'GNU coreutils' in o:
+ raise EnvironmentException(
+ "Found GNU link.exe instead of MSVC link.exe. This link.exe "
+ "is not a linker. You may need to reorder entries to your "
+ "%PATH% variable to resolve this.")
+ __failed_to_detect_linker(compiler, check_args, o, e)
+
+def guess_nix_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Type['Compiler'],
+ for_machine: MachineChoice, *,
+ extra_args: T.Optional[T.List[str]] = None) -> 'DynamicLinker':
+ """Helper for guessing what linker to use on Unix-Like OSes.
+
+ :compiler: Invocation to use to get linker
+ :comp_class: The Compiler Type (uninstantiated)
+ :for_machine: which machine this linker targets
+ :extra_args: Any additional arguments required (such as a source file)
+ """
+ env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ extra_args = extra_args or []
+ extra_args += env.coredata.options[OptionKey('args', lang=comp_class.language, machine=for_machine)].value
+
+ if isinstance(comp_class.LINKER_PREFIX, str):
+ check_args = [comp_class.LINKER_PREFIX + '--version'] + extra_args
+ else:
+ check_args = comp_class.LINKER_PREFIX + ['--version'] + extra_args
+
+ override = [] # type: T.List[str]
+ value = env.lookup_binary_entry(for_machine, comp_class.language + '_ld')
+ if value is not None:
+ override = comp_class.use_linker_args(value[0])
+ check_args += override
+
+ _, o, e = Popen_safe(compiler + check_args)
+ v = search_version(o + e)
+ linker: DynamicLinker
+ if 'LLD' in o.split('\n')[0]:
+ linker = LLVMDynamicLinker(
+ compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+ elif 'Snapdragon' in e and 'LLVM' in e:
+ linker = QualcommLLVMDynamicLinker(
+ compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+ elif e.startswith('lld-link: '):
+ # The LLD MinGW frontend didn't respond to --version before version 9.0.0,
+ # and produced an error message about failing to link (when no object
+ # files were specified), instead of printing the version number.
+ # Let's try to extract the linker invocation command to grab the version.
+
+ _, o, e = Popen_safe(compiler + check_args + ['-v'])
+
+ try:
+ linker_cmd = re.match(r'.*\n(.*?)\nlld-link: ', e, re.DOTALL).group(1)
+ linker_cmd = shlex.split(linker_cmd)[0]
+ except (AttributeError, IndexError, ValueError):
+ pass
+ else:
+ _, o, e = Popen_safe([linker_cmd, '--version'])
+ v = search_version(o)
+
+ linker = LLVMDynamicLinker(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+ # The first check matches Apple clang, the second real gcc, and the third icc.
+ elif e.endswith('(use -v to see invocation)\n') or 'macosx_version' in e or 'ld: unknown option:' in e:
+ if isinstance(comp_class.LINKER_PREFIX, str):
+ _, _, e = Popen_safe(compiler + [comp_class.LINKER_PREFIX + '-v'] + extra_args)
+ else:
+ _, _, e = Popen_safe(compiler + comp_class.LINKER_PREFIX + ['-v'] + extra_args)
+ for line in e.split('\n'):
+ if 'PROJECT:ld' in line:
+ v = line.split('-')[1]
+ break
+ else:
+ v = 'unknown version'
+ linker = AppleDynamicLinker(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+ elif 'GNU' in o or 'GNU' in e:
+ cls: T.Type[GnuDynamicLinker]
+ if 'gold' in o or 'gold' in e:
+ cls = GnuGoldDynamicLinker
+ else:
+ cls = GnuBFDDynamicLinker
+ linker = cls(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+ elif 'Solaris' in e or 'Solaris' in o:
+ for line in (o+e).split('\n'):
+ if 'ld: Software Generation Utilities' in line:
+ v = line.split(':')[2].lstrip()
+ break
+ else:
+ v = 'unknown version'
+ linker = SolarisDynamicLinker(
+ compiler, for_machine, comp_class.LINKER_PREFIX, override,
+ version=v)
+ elif 'ld: 0706-012 The -- flag is not recognized' in e:
+ if isinstance(comp_class.LINKER_PREFIX, str):
+ _, _, e = Popen_safe(compiler + [comp_class.LINKER_PREFIX + '-V'] + extra_args)
+ else:
+ _, _, e = Popen_safe(compiler + comp_class.LINKER_PREFIX + ['-V'] + extra_args)
+ linker = AIXDynamicLinker(
+ compiler, for_machine, comp_class.LINKER_PREFIX, override,
+ version=search_version(e))
+ else:
+ __failed_to_detect_linker(compiler, check_args, o, e)
+ return linker
diff --git a/meson/mesonbuild/linkers/linkers.py b/meson/mesonbuild/linkers/linkers.py
new file mode 100644
index 000000000..bcd5355b8
--- /dev/null
+++ b/meson/mesonbuild/linkers/linkers.py
@@ -0,0 +1,1433 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import abc
+import enum
+import os
+import typing as T
+
+from .. import mesonlib
+from ..mesonlib import EnvironmentException, MesonException
+from ..arglist import CompilerArgs
+
+if T.TYPE_CHECKING:
+ from ..coredata import KeyedOptionDictType
+ from ..environment import Environment
+ from ..mesonlib import MachineChoice
+
+
+@enum.unique
+class RSPFileSyntax(enum.Enum):
+
+ """Which RSP file syntax the compiler supports."""
+
+ MSVC = enum.auto()
+ GCC = enum.auto()
+
+
+class StaticLinker:
+
+ id: str
+
+ def __init__(self, exelist: T.List[str]):
+ self.exelist = exelist
+
+ def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs:
+ return CompilerArgs(self, args)
+
+ def can_linker_accept_rsp(self) -> bool:
+ """
+ Determines whether the linker can accept arguments using the @rsp syntax.
+ """
+ return mesonlib.is_windows()
+
+ def get_base_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ """Like compilers.get_base_link_args, but for the static linker."""
+ return []
+
+ def get_exelist(self) -> T.List[str]:
+ return self.exelist.copy()
+
+ def get_std_link_args(self) -> T.List[str]:
+ return []
+
+ def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
+ return []
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return []
+
+ def get_coverage_link_args(self) -> T.List[str]:
+ return []
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
+
+ def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+ return []
+
+ def openmp_flags(self) -> T.List[str]:
+ return []
+
+ def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return []
+
+ @classmethod
+ def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
+ return args[:]
+
+ @classmethod
+ def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]:
+ return args[:]
+
+ def get_link_debugfile_name(self, targetfile: str) -> str:
+ return None
+
+ def get_link_debugfile_args(self, targetfile: str) -> T.List[str]:
+ # Static libraries do not have PDB files
+ return []
+
+ def get_always_args(self) -> T.List[str]:
+ return []
+
+ def get_linker_always_args(self) -> T.List[str]:
+ return []
+
+ def rsp_file_syntax(self) -> RSPFileSyntax:
+ """The format of the RSP file that this compiler supports.
+
+ If `self.can_linker_accept_rsp()` returns True, then this needs to
+ be implemented
+ """
+ assert not self.can_linker_accept_rsp(), f'{self.id} linker accepts RSP, but doesn\'t provide a supported format, this is a bug'
+ raise EnvironmentException(f'{self.id} does not implement rsp format, this shouldn\'t be called')
+
+
+class VisualStudioLikeLinker:
+ always_args = ['/NOLOGO']
+
+ def __init__(self, machine: str):
+ self.machine = machine
+
+ def get_always_args(self) -> T.List[str]:
+ return self.always_args.copy()
+
+ def get_linker_always_args(self) -> T.List[str]:
+ return self.always_args.copy()
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ args = [] # type: T.List[str]
+ if self.machine:
+ args += ['/MACHINE:' + self.machine]
+ args += ['/OUT:' + target]
+ return args
+
+ @classmethod
+ def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
+ from ..compilers import VisualStudioCCompiler
+ return VisualStudioCCompiler.unix_args_to_native(args)
+
+ @classmethod
+ def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]:
+ from ..compilers import VisualStudioCCompiler
+ return VisualStudioCCompiler.native_args_to_unix(args)
+
+ def rsp_file_syntax(self) -> RSPFileSyntax:
+ return RSPFileSyntax.MSVC
+
+
+class VisualStudioLinker(VisualStudioLikeLinker, StaticLinker):
+
+ """Microsoft's lib static linker."""
+
+ def __init__(self, exelist: T.List[str], machine: str):
+ StaticLinker.__init__(self, exelist)
+ VisualStudioLikeLinker.__init__(self, machine)
+
+
+class IntelVisualStudioLinker(VisualStudioLikeLinker, StaticLinker):
+
+ """Intel's xilib static linker."""
+
+ def __init__(self, exelist: T.List[str], machine: str):
+ StaticLinker.__init__(self, exelist)
+ VisualStudioLikeLinker.__init__(self, machine)
+
+
+class ArLinker(StaticLinker):
+
+ def __init__(self, exelist: T.List[str]):
+ super().__init__(exelist)
+ self.id = 'ar'
+ pc, stdo = mesonlib.Popen_safe(self.exelist + ['-h'])[0:2]
+ # Enable deterministic builds if they are available.
+ if '[D]' in stdo:
+ self.std_args = ['csrD']
+ else:
+ self.std_args = ['csr']
+ self.can_rsp = '@<' in stdo
+
+ def can_linker_accept_rsp(self) -> bool:
+ return self.can_rsp
+
+ def get_std_link_args(self) -> T.List[str]:
+ return self.std_args
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return [target]
+
+ def rsp_file_syntax(self) -> RSPFileSyntax:
+ return RSPFileSyntax.GCC
+
+
+class ArmarLinker(ArLinker): # lgtm [py/missing-call-to-init]
+
+ def __init__(self, exelist: T.List[str]):
+ StaticLinker.__init__(self, exelist)
+ self.id = 'armar'
+ self.std_args = ['-csr']
+
+ def can_linker_accept_rsp(self) -> bool:
+ # armar can't accept arguments using the @rsp syntax
+ return False
+
+
+class DLinker(StaticLinker):
+ def __init__(self, exelist: T.List[str], arch: str, *, rsp_syntax: RSPFileSyntax = RSPFileSyntax.GCC):
+ super().__init__(exelist)
+ self.id = exelist[0]
+ self.arch = arch
+ self.__rsp_syntax = rsp_syntax
+
+ def get_std_link_args(self) -> T.List[str]:
+ return ['-lib']
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return ['-of=' + target]
+
+ def get_linker_always_args(self) -> T.List[str]:
+ if mesonlib.is_windows():
+ if self.arch == 'x86_64':
+ return ['-m64']
+ elif self.arch == 'x86_mscoff' and self.id == 'dmd':
+ return ['-m32mscoff']
+ return ['-m32']
+ return []
+
+ def rsp_file_syntax(self) -> RSPFileSyntax:
+ return self.__rsp_syntax
+
+
+class CcrxLinker(StaticLinker):
+
+ def __init__(self, exelist: T.List[str]):
+ super().__init__(exelist)
+ self.id = 'rlink'
+
+ def can_linker_accept_rsp(self) -> bool:
+ return False
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return [f'-output={target}']
+
+ def get_linker_always_args(self) -> T.List[str]:
+ return ['-nologo', '-form=library']
+
+
+class Xc16Linker(StaticLinker):
+
+ def __init__(self, exelist: T.List[str]):
+ super().__init__(exelist)
+ self.id = 'xc16-ar'
+
+ def can_linker_accept_rsp(self) -> bool:
+ return False
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return [f'{target}']
+
+ def get_linker_always_args(self) -> T.List[str]:
+ return ['rcs']
+
+
+class CompCertLinker(StaticLinker):
+
+ def __init__(self, exelist: T.List[str]):
+ super().__init__(exelist)
+ self.id = 'ccomp'
+
+ def can_linker_accept_rsp(self) -> bool:
+ return False
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return [f'-o{target}']
+
+
+class C2000Linker(StaticLinker):
+
+ def __init__(self, exelist: T.List[str]):
+ super().__init__(exelist)
+ self.id = 'ar2000'
+
+ def can_linker_accept_rsp(self) -> bool:
+ return False
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return [f'{target}']
+
+ def get_linker_always_args(self) -> T.List[str]:
+ return ['-r']
+
+
+class AIXArLinker(ArLinker):
+
+ def __init__(self, exelist: T.List[str]):
+ StaticLinker.__init__(self, exelist)
+ self.id = 'aixar'
+ self.std_args = ['-csr', '-Xany']
+
+ def can_linker_accept_rsp(self) -> bool:
+ # AIXAr can't accept arguments using the @rsp syntax
+ return False
+
+
+def prepare_rpaths(raw_rpaths: str, build_dir: str, from_dir: str) -> T.List[str]:
+ # The rpaths we write must be relative if they point to the build dir,
+ # because otherwise they have different length depending on the build
+ # directory. This breaks reproducible builds.
+ internal_format_rpaths = [evaluate_rpath(p, build_dir, from_dir) for p in raw_rpaths]
+ ordered_rpaths = order_rpaths(internal_format_rpaths)
+ return ordered_rpaths
+
+
+def order_rpaths(rpath_list: T.List[str]) -> T.List[str]:
+ # We want rpaths that point inside our build dir to always override
+ # those pointing to other places in the file system. This is so built
+ # binaries prefer our libraries to the ones that may lie somewhere
+ # in the file system, such as /lib/x86_64-linux-gnu.
+ #
+ # The correct thing to do here would be C++'s std::stable_partition.
+ # Python standard library does not have it, so replicate it with
+ # sort, which is guaranteed to be stable.
+ return sorted(rpath_list, key=os.path.isabs)
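+ # Illustration (hypothetical paths, not from the original source):
+ # order_rpaths(['/usr/lib', 'subdir', 'other']) returns
+ # ['subdir', 'other', '/usr/lib']: os.path.isabs maps relative paths to
+ # False (0) and absolute ones to True (1), and Python's sort is stable,
+ # so the original order is kept within each group.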
+
+
+def evaluate_rpath(p: str, build_dir: str, from_dir: str) -> str:
+ if p == from_dir:
+ return '' # relpath errors out in this case
+ elif os.path.isabs(p):
+ return p # These can be outside of build dir.
+ else:
+ return os.path.relpath(os.path.join(build_dir, p), os.path.join(build_dir, from_dir))
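+ # Illustration (hypothetical arguments, not from the original source):
+ # evaluate_rpath('sub/lib', '/build', 'src') returns '../sub/lib', the
+ # path relative to the directory being linked from, while an absolute
+ # rpath such as '/opt/lib' is passed through unchanged.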
+
+
+class DynamicLinker(metaclass=abc.ABCMeta):
+
+ """Base class for dynamic linkers."""
+
+ _BUILDTYPE_ARGS = {
+ 'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+ } # type: T.Dict[str, T.List[str]]
+
+ @abc.abstractproperty
+ def id(self) -> str:
+ pass
+
+ def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]:
+ args = [arg] if isinstance(arg, str) else arg
+ if self.prefix_arg is None:
+ return args
+ elif isinstance(self.prefix_arg, str):
+ return [self.prefix_arg + arg for arg in args]
+ ret = []
+ for arg in args:
+ ret += self.prefix_arg + [arg]
+ return ret
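+ # Illustration (hypothetical values, not from the original source):
+ # with prefix_arg '-Wl,' a call like _apply_prefix('--as-needed')
+ # returns ['-Wl,--as-needed']; with prefix_arg ['-Xlinker'] it returns
+ # ['-Xlinker', '--as-needed']; with prefix_arg None the arguments pass
+ # through unchanged.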
+
+ def __init__(self, exelist: T.List[str],
+ for_machine: mesonlib.MachineChoice, prefix_arg: T.Union[str, T.List[str]],
+ always_args: T.List[str], *, version: str = 'unknown version'):
+ self.exelist = exelist
+ self.for_machine = for_machine
+ self.version = version
+ self.prefix_arg = prefix_arg
+ self.always_args = always_args
+ self.machine = None # type: T.Optional[str]
+
+ def __repr__(self) -> str:
+ return '<{}: v{} `{}`>'.format(type(self).__name__, self.version, ' '.join(self.exelist))
+
+ def get_id(self) -> str:
+ return self.id
+
+ def get_version_string(self) -> str:
+ return f'({self.id} {self.version})'
+
+ def get_exelist(self) -> T.List[str]:
+ return self.exelist.copy()
+
+ def get_accepts_rsp(self) -> bool:
+ # rsp files are only used when building on Windows because we want to
+ # avoid issues with quoting and max argument length
+ return mesonlib.is_windows()
+
+ def rsp_file_syntax(self) -> RSPFileSyntax:
+ """The format of the RSP file that this compiler supports.
+
+ If `self.can_linker_accept_rsp()` returns True, then this needs to
+ be implemented
+ """
+ return RSPFileSyntax.GCC
+
+ def get_always_args(self) -> T.List[str]:
+ return self.always_args.copy()
+
+ def get_lib_prefix(self) -> str:
+ return ''
+
+ # XXX: is use_ldflags a compiler or a linker attribute?
+
+ def get_option_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return []
+
+ def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+ raise EnvironmentException(f'Language {self.id} does not support has_multi_link_arguments.')
+
+ def get_debugfile_name(self, targetfile: str) -> str:
+ '''Name of debug file written out (see below)'''
+ return None
+
+ def get_debugfile_args(self, targetfile: str) -> T.List[str]:
+ """Some compilers (MSVC) write debug into a separate file.
+
+ This method takes the target object path and returns a list of
+ commands to append to the linker invocation to control where that
+ file is written.
+ """
+ return []
+
+ def get_std_shared_lib_args(self) -> T.List[str]:
+ return []
+
+ def get_std_shared_module_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return self.get_std_shared_lib_args()
+
+ def get_pie_args(self) -> T.List[str]:
+ # TODO: this really needs to take a boolean and return the args to
+ # disable pie, otherwise it only acts to enable pie if pie *isn't* the
+ # default.
+ raise EnvironmentException(f'Linker {self.id} does not support position-independent executable')
+
+ def get_lto_args(self) -> T.List[str]:
+ return []
+
+ def sanitizer_args(self, value: str) -> T.List[str]:
+ return []
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ # We can override these in children by just overriding the
+ # _BUILDTYPE_ARGS value.
+ return self._BUILDTYPE_ARGS[buildtype]
+
+ def get_asneeded_args(self) -> T.List[str]:
+ return []
+
+ def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+ raise EnvironmentException(
+ f'Linker {self.id} does not support link_whole')
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ raise EnvironmentException(
+ f'Linker {self.id} does not support allow undefined')
+
+ @abc.abstractmethod
+ def get_output_args(self, outname: str) -> T.List[str]:
+ pass
+
+ def get_coverage_args(self) -> T.List[str]:
+ raise EnvironmentException(f"Linker {self.id} doesn't implement coverage data generation.")
+
+ @abc.abstractmethod
+ def get_search_args(self, dirname: str) -> T.List[str]:
+ pass
+
+ def export_dynamic_args(self, env: 'Environment') -> T.List[str]:
+ return []
+
+ def import_library_args(self, implibname: str) -> T.List[str]:
+ """The name of the outputted import library.
+
+ This implementation is used only on Windows by compilers that use GNU ld
+ """
+ return []
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ return []
+
+ def no_undefined_args(self) -> T.List[str]:
+ """Arguments to error if there are any undefined symbols at link time.
+
+ This is the inverse of get_allow_undefined_args().
+
+ TODO: A future cleanup might merge this and
+ get_allow_undefined_args() into a single method taking a
+ boolean
+ """
+ return []
+
+ def fatal_warnings(self) -> T.List[str]:
+ """Arguments to make all warnings errors."""
+ return []
+
+ def headerpad_args(self) -> T.List[str]:
+ # Only used by the Apple linker
+ return []
+
+ def get_gui_app_args(self, value: bool) -> T.List[str]:
+ # Only used by VisualStudioLikeLinkers
+ return []
+
+ def get_win_subsystem_args(self, value: str) -> T.List[str]:
+ # Only used if supported by the dynamic linker and
+ # only when targeting Windows
+ return []
+
+ def bitcode_args(self) -> T.List[str]:
+ raise MesonException('This linker does not support bitcode bundles')
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ return []
+
+
+class PosixDynamicLinkerMixin:
+
+ """Mixin class for POSIX-ish linkers.
+
+ This is obviously a pretty small subset of the linker interface, but
+ enough of the dynamic linkers that meson supports are POSIX-like
+ without being GNU-like that it makes sense to split this out.
+ """
+
+ def get_output_args(self, outname: str) -> T.List[str]:
+ return ['-o', outname]
+
+ def get_std_shared_lib_args(self) -> T.List[str]:
+ return ['-shared']
+
+ def get_search_args(self, dirname: str) -> T.List[str]:
+ return ['-L' + dirname]
+
+
+class GnuLikeDynamicLinkerMixin:
+
+ """Mixin class for dynamic linkers that provides gnu-like interface.
+
+ This acts as a base for the GNU linkers (bfd and gold), LLVM's lld, and
+ other linkers that accept a GNU-ld-style interface.
+ """
+
+ if T.TYPE_CHECKING:
+ for_machine = MachineChoice.HOST
+ def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]: ...
+
+ _BUILDTYPE_ARGS = {
+ 'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': ['-O1'],
+ 'minsize': [],
+ 'custom': [],
+ } # type: T.Dict[str, T.List[str]]
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ # We can override these in children by just overriding the
+ # _BUILDTYPE_ARGS value.
+ return mesonlib.listify([self._apply_prefix(a) for a in self._BUILDTYPE_ARGS[buildtype]])
+
+ def get_pie_args(self) -> T.List[str]:
+ return ['-pie']
+
+ def get_asneeded_args(self) -> T.List[str]:
+ return self._apply_prefix('--as-needed')
+
+ def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+ if not args:
+ return args
+ return self._apply_prefix('--whole-archive') + args + self._apply_prefix('--no-whole-archive')
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return self._apply_prefix('--allow-shlib-undefined')
+
+ def get_lto_args(self) -> T.List[str]:
+ return ['-flto']
+
+ def sanitizer_args(self, value: str) -> T.List[str]:
+ if value == 'none':
+ return []
+ return ['-fsanitize=' + value]
+
+ def get_coverage_args(self) -> T.List[str]:
+ return ['--coverage']
+
+ def export_dynamic_args(self, env: 'Environment') -> T.List[str]:
+ m = env.machines[self.for_machine]
+ if m.is_windows() or m.is_cygwin():
+ return self._apply_prefix('--export-all-symbols')
+ return self._apply_prefix('-export-dynamic')
+
+ def import_library_args(self, implibname: str) -> T.List[str]:
+ return self._apply_prefix('--out-implib=' + implibname)
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ if env.machines[self.for_machine].is_haiku():
+ return []
+ return ['-pthread']
+
+ def no_undefined_args(self) -> T.List[str]:
+ return self._apply_prefix('--no-undefined')
+
+ def fatal_warnings(self) -> T.List[str]:
+ return self._apply_prefix('--fatal-warnings')
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ m = env.machines[self.for_machine]
+ if m.is_windows() or m.is_cygwin():
+ # For PE/COFF the soname argument has no effect
+ return []
+ sostr = '' if soversion is None else '.' + soversion
+ return self._apply_prefix(f'-soname,{prefix}{shlib_name}.{suffix}{sostr}')
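+ # Illustration (hypothetical values, not from the original source):
+ # prefix 'lib', shlib_name 'foo', suffix 'so', soversion '2' produces
+ # ['-Wl,-soname,libfoo.so.2'] for a compiler whose linker prefix is
+ # '-Wl,'.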
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ m = env.machines[self.for_machine]
+ if m.is_windows() or m.is_cygwin():
+ return ([], set())
+ if not rpath_paths and not install_rpath and not build_rpath:
+ return ([], set())
+ args = []
+ origin_placeholder = '$ORIGIN'
+ processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
+ # Need to deduplicate rpaths, as macOS's install_name_tool
+ # is *very* allergic to duplicate -delete_rpath arguments
+ # when calling depfixer on installation.
+ all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
+ rpath_dirs_to_remove = set()
+ for p in all_paths:
+ rpath_dirs_to_remove.add(p.encode('utf8'))
+ # Build_rpath is used as-is (it is usually absolute).
+ if build_rpath != '':
+ all_paths.add(build_rpath)
+ for p in build_rpath.split(':'):
+ rpath_dirs_to_remove.add(p.encode('utf8'))
+
+ # TODO: should this actually be "for (dragonfly|open)bsd"?
+ if mesonlib.is_dragonflybsd() or mesonlib.is_openbsd():
+ # This argument instructs the compiler to record the value of
+ # ORIGIN in the .dynamic section of the elf. On Linux this is done
+ # by default, but is not on dragonfly/openbsd for some reason. Without this
+ # $ORIGIN in the runtime path will be undefined and any binaries
+ # linked against local libraries will fail to resolve them.
+ args.extend(self._apply_prefix('-z,origin'))
+
+ # In order to avoid relinking for RPATH removal, the binary needs to contain just
+ # enough space in the ELF header to hold the final installation RPATH.
+ paths = ':'.join(all_paths)
+ if len(paths) < len(install_rpath):
+ padding = 'X' * (len(install_rpath) - len(paths))
+ if not paths:
+ paths = padding
+ else:
+ paths = paths + ':' + padding
+ args.extend(self._apply_prefix('-rpath,' + paths))
+
+ # TODO: should this actually be "for solaris/sunos"?
+ if mesonlib.is_sunos():
+ return (args, rpath_dirs_to_remove)
+
+ # Rpaths to use while linking must be absolute. These are not
+ # written to the binary. Needed only with GNU ld:
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=16936
+ # Not needed on Windows or other platforms that don't use RPATH
+ # https://github.com/mesonbuild/meson/issues/1897
+ #
+ # In addition, this linker option tends to be quite long and some
+ # compilers have trouble dealing with it. That's why we will include
+ # one option per folder, like this:
+ #
+ # -Wl,-rpath-link,/path/to/folder1 -Wl,-rpath,/path/to/folder2 ...
+ #
+ # ...instead of just one single looooong option, like this:
+ #
+ # -Wl,-rpath-link,/path/to/folder1:/path/to/folder2:...
+ for p in rpath_paths:
+ args.extend(self._apply_prefix('-rpath-link,' + os.path.join(build_dir, p)))
+
+ return (args, rpath_dirs_to_remove)
+
+ def get_win_subsystem_args(self, value: str) -> T.List[str]:
+ if 'windows' in value:
+ args = ['--subsystem,windows']
+ elif 'console' in value:
+ args = ['--subsystem,console']
+ else:
+ raise MesonException(f'Only "windows" and "console" are supported for win_subsystem with MinGW, not "{value}".')
+ if ',' in value:
+ args[-1] = args[-1] + ':' + value.split(',')[1]
+
+ return self._apply_prefix(args)
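+ # Illustration (hypothetical value, not from the original source): a
+ # win_subsystem value of 'windows,6.0' becomes the single flag
+ # '--subsystem,windows:6.0' (before the linker prefix is applied),
+ # encoding both the subsystem and its minimum version.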
+
+
+class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+ """Apple's ld implementation."""
+
+ id = 'ld64'
+
+ def get_asneeded_args(self) -> T.List[str]:
+ return self._apply_prefix('-dead_strip_dylibs')
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return self._apply_prefix('-undefined,dynamic_lookup')
+
+ def get_std_shared_module_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+ return ['-bundle'] + self._apply_prefix('-undefined,dynamic_lookup')
+
+ def get_pie_args(self) -> T.List[str]:
+ return []
+
+ def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+ result = [] # type: T.List[str]
+ for a in args:
+ result.extend(self._apply_prefix('-force_load'))
+ result.append(a)
+ return result
+
+ def get_coverage_args(self) -> T.List[str]:
+ return ['--coverage']
+
+ def sanitizer_args(self, value: str) -> T.List[str]:
+ if value == 'none':
+ return []
+ return ['-fsanitize=' + value]
+
+ def no_undefined_args(self) -> T.List[str]:
+ return self._apply_prefix('-undefined,error')
+
+ def headerpad_args(self) -> T.List[str]:
+ return self._apply_prefix('-headerpad_max_install_names')
+
+ def bitcode_args(self) -> T.List[str]:
+ return self._apply_prefix('-bitcode_bundle')
+
+ def fatal_warnings(self) -> T.List[str]:
+ return self._apply_prefix('-fatal_warnings')
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ if is_shared_module:
+ return []
+ install_name = ['@rpath/', prefix, shlib_name]
+ if soversion is not None:
+ install_name.append('.' + soversion)
+ install_name.append('.dylib')
+ args = ['-install_name', ''.join(install_name)]
+ if darwin_versions:
+ args.extend(['-compatibility_version', darwin_versions[0],
+ '-current_version', darwin_versions[1]])
+ return args
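+ # Illustration (hypothetical values, not from the original source):
+ # prefix 'lib', shlib_name 'foo', soversion '1' and darwin_versions
+ # ('1', '1.2') yield ['-install_name', '@rpath/libfoo.1.dylib',
+ # '-compatibility_version', '1', '-current_version', '1.2'].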
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ if not rpath_paths and not install_rpath and not build_rpath:
+ return ([], set())
+ args = []
+ # @loader_path is the equivalent of $ORIGIN on macOS
+ # https://stackoverflow.com/q/26280738
+ origin_placeholder = '@loader_path'
+ processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
+ all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
+ if build_rpath != '':
+ all_paths.add(build_rpath)
+ for rp in all_paths:
+ args.extend(self._apply_prefix('-rpath,' + rp))
+
+ return (args, set())
+
+
+class GnuDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
+
+ """Representation of GNU ld.bfd and ld.gold."""
+
+ def get_accepts_rsp(self) -> bool:
+ return True
+
+
+class GnuGoldDynamicLinker(GnuDynamicLinker):
+
+ id = 'ld.gold'
+
+
+class GnuBFDDynamicLinker(GnuDynamicLinker):
+
+ id = 'ld.bfd'
+
+
+class LLVMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
+
+ """Representation of LLVM's ld.lld linker.
+
+ This is only the gnu-like linker, not the apple like or link.exe like
+ linkers.
+ """
+
+ id = 'ld.lld'
+
+ def __init__(self, exelist: T.List[str],
+ for_machine: mesonlib.MachineChoice, prefix_arg: T.Union[str, T.List[str]],
+ always_args: T.List[str], *, version: str = 'unknown version'):
+ super().__init__(exelist, for_machine, prefix_arg, always_args, version=version)
+
+ # Some targets don't seem to support this argument (windows, wasm, ...)
+ _, _, e = mesonlib.Popen_safe(self.exelist + self._apply_prefix('--allow-shlib-undefined'))
+ self.has_allow_shlib_undefined = 'unknown argument: --allow-shlib-undefined' not in e
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ if self.has_allow_shlib_undefined:
+ return self._apply_prefix('--allow-shlib-undefined')
+ return []
+
+
+class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
+
+ """Emscripten's wasm-ld."""
+
+ id = 'ld.wasm'
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return ['-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0']
+
+ def no_undefined_args(self) -> T.List[str]:
+ return ['-s', 'ERROR_ON_UNDEFINED_SYMBOLS=1']
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ raise MesonException(f'{self.id} does not support shared libraries.')
+
+ def get_asneeded_args(self) -> T.List[str]:
+ return []
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
+
+
+class CcrxDynamicLinker(DynamicLinker):
+
+ """Linker for Renesis CCrx compiler."""
+
+ id = 'rlink'
+
+ def __init__(self, for_machine: mesonlib.MachineChoice,
+ *, version: str = 'unknown version'):
+ super().__init__(['rlink.exe'], for_machine, '', [],
+ version=version)
+
+ def get_accepts_rsp(self) -> bool:
+ return False
+
+ def get_lib_prefix(self) -> str:
+ return '-lib='
+
+ def get_std_shared_lib_args(self) -> T.List[str]:
+ return []
+
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ return [f'-output={outputname}']
+
+ def get_search_args(self, dirname: str) -> 'T.NoReturn':
+ raise OSError('rlink.exe does not have a search dir argument')
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return []
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ return []
+
+
+class Xc16DynamicLinker(DynamicLinker):
+
+ """Linker for Microchip XC16 compiler."""
+
+ id = 'xc16-gcc'
+
+ def __init__(self, for_machine: mesonlib.MachineChoice,
+ *, version: str = 'unknown version'):
+ super().__init__(['xc16-gcc.exe'], for_machine, '', [],
+ version=version)
+
+ def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+ if not args:
+ return args
+ return self._apply_prefix('--start-group') + args + self._apply_prefix('--end-group')
+
+ def get_accepts_rsp(self) -> bool:
+ return False
+
+ def get_lib_prefix(self) -> str:
+ return ''
+
+ def get_std_shared_lib_args(self) -> T.List[str]:
+ return []
+
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ return [f'-o{outputname}']
+
+ def get_search_args(self, dirname: str) -> 'T.NoReturn':
+ raise OSError('xc16-gcc.exe does not have a search dir argument')
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return []
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ return []
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
+
+
+class CompCertDynamicLinker(DynamicLinker):
+
+ """Linker for CompCert C compiler."""
+
+ id = 'ccomp'
+
+ def __init__(self, for_machine: mesonlib.MachineChoice,
+ *, version: str = 'unknown version'):
+ super().__init__(['ccomp'], for_machine, '', [],
+ version=version)
+
+ def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+ if not args:
+ return args
+ return self._apply_prefix('-Wl,--whole-archive') + args + self._apply_prefix('-Wl,--no-whole-archive')
+
+ def get_accepts_rsp(self) -> bool:
+ return False
+
+ def get_lib_prefix(self) -> str:
+ return ''
+
+ def get_std_shared_lib_args(self) -> T.List[str]:
+ return []
+
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ return [f'-o{outputname}']
+
+ def get_search_args(self, dirname: str) -> T.List[str]:
+ return [f'-L{dirname}']
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return []
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ raise MesonException(f'{self.id} does not support shared libraries.')
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
+
+
+class C2000DynamicLinker(DynamicLinker):
+
+ """Linker for Texas Instruments C2000 compiler."""
+
+ id = 'cl2000'
+
+ def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
+ *, version: str = 'unknown version'):
+ super().__init__(exelist or ['cl2000.exe'], for_machine, '', [],
+ version=version)
+
+ def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+ if not args:
+ return args
+ return self._apply_prefix('--start-group') + args + self._apply_prefix('--end-group')
+
+ def get_accepts_rsp(self) -> bool:
+ return False
+
+ def get_lib_prefix(self) -> str:
+ return '-l='
+
+ def get_std_shared_lib_args(self) -> T.List[str]:
+ return []
+
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ return ['-z', f'--output_file={outputname}']
+
+ def get_search_args(self, dirname: str) -> 'T.NoReturn':
+ raise OSError('cl2000.exe does not have a search dir argument')
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return []
+
+ def get_always_args(self) -> T.List[str]:
+ return []
+
+
+class ArmDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+ """Linker for the ARM compiler."""
+
+ id = 'armlink'
+
+ def __init__(self, for_machine: mesonlib.MachineChoice,
+ *, version: str = 'unknown version'):
+ super().__init__(['armlink'], for_machine, '', [],
+ version=version)
+
+ def get_accepts_rsp(self) -> bool:
+ return False
+
+ def get_std_shared_lib_args(self) -> 'T.NoReturn':
+ raise MesonException('The Arm linkers do not support shared libraries')
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return []
+
+
+class ArmClangDynamicLinker(ArmDynamicLinker):
+
+ """Linker used with ARM's clang fork.
+
+ The interface is similar enough to the old ARM ld that it inherits and
+ extends a few things as needed.
+ """
+
+ def export_dynamic_args(self, env: 'Environment') -> T.List[str]:
+ return ['--export_dynamic']
+
+ def import_library_args(self, implibname: str) -> T.List[str]:
+ return ['--symdefs=' + implibname]
+
+
+class QualcommLLVMDynamicLinker(LLVMDynamicLinker):
+
+ """ARM Linker from Snapdragon LLVM ARM Compiler."""
+
+ id = 'ld.qcld'
+
+
+class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+ """PGI linker."""
+
+ id = 'pgi'
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return []
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ return []
+
+ def get_std_shared_lib_args(self) -> T.List[str]:
+ # PGI -shared is Linux only.
+ if mesonlib.is_windows():
+ return ['-Bdynamic', '-Mmakedll']
+ elif mesonlib.is_linux():
+ return ['-shared']
+ return []
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ if not env.machines[self.for_machine].is_windows():
+ return (['-R' + os.path.join(build_dir, p) for p in rpath_paths], set())
+ return ([], set())
+
+NvidiaHPC_DynamicLinker = PGIDynamicLinker
+
+
+class PGIStaticLinker(StaticLinker):
+ def __init__(self, exelist: T.List[str]):
+ super().__init__(exelist)
+ self.id = 'ar'
+ self.std_args = ['-r']
+
+ def get_std_link_args(self) -> T.List[str]:
+ return self.std_args
+
+ def get_output_args(self, target: str) -> T.List[str]:
+ return [target]
+
+NvidiaHPC_StaticLinker = PGIStaticLinker
+
+
+class VisualStudioLikeLinkerMixin:
+
+ """Mixin class for for dynamic linkers that act like Microsoft's link.exe."""
+
+ if T.TYPE_CHECKING:
+ for_machine = MachineChoice.HOST
+ def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]: ...
+
+ _BUILDTYPE_ARGS = {
+ 'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ # The otherwise implicit REF and ICF linker optimisations are disabled by
+ # /DEBUG. REF implies ICF.
+ 'release': ['/OPT:REF'],
+ 'minsize': ['/INCREMENTAL:NO', '/OPT:REF'],
+ 'custom': [],
+ } # type: T.Dict[str, T.List[str]]
+
+ def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
+ prefix_arg: T.Union[str, T.List[str]], always_args: T.List[str], *,
+ version: str = 'unknown version', direct: bool = True, machine: str = 'x86'):
+ # There's no way I can find to make mypy understand what's going on here
+ super().__init__(exelist, for_machine, prefix_arg, always_args, version=version) # type: ignore
+ self.machine = machine
+ self.direct = direct
+
+ def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+ return mesonlib.listify([self._apply_prefix(a) for a in self._BUILDTYPE_ARGS[buildtype]])
+
+ def invoked_by_compiler(self) -> bool:
+ return not self.direct
+
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ return self._apply_prefix(['/MACHINE:' + self.machine, '/OUT:' + outputname])
+
+ def get_always_args(self) -> T.List[str]:
+ parent = super().get_always_args() # type: ignore
+ return self._apply_prefix('/nologo') + T.cast(T.List[str], parent)
+
+ def get_search_args(self, dirname: str) -> T.List[str]:
+ return self._apply_prefix('/LIBPATH:' + dirname)
+
+ def get_std_shared_lib_args(self) -> T.List[str]:
+ return self._apply_prefix('/DLL')
+
+ def get_debugfile_name(self, targetfile: str) -> str:
+ basename = targetfile.rsplit('.', maxsplit=1)[0]
+ return basename + '.pdb'
+
+ def get_debugfile_args(self, targetfile: str) -> T.List[str]:
+ return self._apply_prefix(['/DEBUG', '/PDB:' + self.get_debugfile_name(targetfile)])
+
+ def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+ # Only since VS2015
+ args = mesonlib.listify(args)
+        l = []  # type: T.List[str]
+ for a in args:
+ l.extend(self._apply_prefix('/WHOLEARCHIVE:' + a))
+ return l
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return []
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ return []
+
+ def import_library_args(self, implibname: str) -> T.List[str]:
+ """The command to generate the import library."""
+ return self._apply_prefix(['/IMPLIB:' + implibname])
+
+ def rsp_file_syntax(self) -> RSPFileSyntax:
+ return RSPFileSyntax.MSVC
+
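+# Illustrative note (not upstream code): with the defaults above and an empty
+# prefix_arg (so _apply_prefix is a pass-through), get_output_args('foo.dll')
+# yields ['/MACHINE:x86', '/OUT:foo.dll'] and get_debugfile_args('foo.dll')
+# yields ['/DEBUG', '/PDB:foo.pdb'].
+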
+
+class MSVCDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
+
+ """Microsoft's Link.exe."""
+
+ id = 'link'
+
+ def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str], *,
+ exelist: T.Optional[T.List[str]] = None,
+ prefix: T.Union[str, T.List[str]] = '',
+ machine: str = 'x86', version: str = 'unknown version',
+ direct: bool = True):
+ super().__init__(exelist or ['link.exe'], for_machine,
+ prefix, always_args, machine=machine, version=version, direct=direct)
+
+ def get_always_args(self) -> T.List[str]:
+ return self._apply_prefix(['/nologo', '/release']) + super().get_always_args()
+
+ def get_gui_app_args(self, value: bool) -> T.List[str]:
+ return self.get_win_subsystem_args("windows" if value else "console")
+
+ def get_win_subsystem_args(self, value: str) -> T.List[str]:
+ return self._apply_prefix([f'/SUBSYSTEM:{value.upper()}'])
+
+
+class ClangClDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
+
+ """Clang's lld-link.exe."""
+
+ id = 'lld-link'
+
+ def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str], *,
+ exelist: T.Optional[T.List[str]] = None,
+ prefix: T.Union[str, T.List[str]] = '',
+ machine: str = 'x86', version: str = 'unknown version',
+ direct: bool = True):
+ super().__init__(exelist or ['lld-link.exe'], for_machine,
+ prefix, always_args, machine=machine, version=version, direct=direct)
+
+ def get_output_args(self, outputname: str) -> T.List[str]:
+ # If we're being driven indirectly by clang just skip /MACHINE
+ # as clang's target triple will handle the machine selection
+ if self.machine is None:
+ return self._apply_prefix([f"/OUT:{outputname}"])
+
+ return super().get_output_args(outputname)
+
+ def get_gui_app_args(self, value: bool) -> T.List[str]:
+ return self.get_win_subsystem_args("windows" if value else "console")
+
+ def get_win_subsystem_args(self, value: str) -> T.List[str]:
+ return self._apply_prefix([f'/SUBSYSTEM:{value.upper()}'])
+
+
+class XilinkDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
+
+ """Intel's Xilink.exe."""
+
+ id = 'xilink'
+
+ def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str], *,
+ exelist: T.Optional[T.List[str]] = None,
+ prefix: T.Union[str, T.List[str]] = '',
+ machine: str = 'x86', version: str = 'unknown version',
+ direct: bool = True):
+ super().__init__(['xilink.exe'], for_machine, '', always_args, version=version)
+
+ def get_gui_app_args(self, value: bool) -> T.List[str]:
+ return self.get_win_subsystem_args("windows" if value else "console")
+
+ def get_win_subsystem_args(self, value: str) -> T.List[str]:
+ return self._apply_prefix([f'/SUBSYSTEM:{value.upper()}'])
+
+
+class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+ """Sys-V derived linker used on Solaris and OpenSolaris."""
+
+ id = 'ld.solaris'
+
+ def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+ if not args:
+ return args
+ return self._apply_prefix('--whole-archive') + args + self._apply_prefix('--no-whole-archive')
+
+ def get_pie_args(self) -> T.List[str]:
+ # Available in Solaris 11.2 and later
+ pc, stdo, stde = mesonlib.Popen_safe(self.exelist + self._apply_prefix('-zhelp'))
+ for line in (stdo + stde).split('\n'):
+ if '-z type' in line:
+ if 'pie' in line:
+ return ['-z', 'type=pie']
+ break
+ return []
+
+ def get_asneeded_args(self) -> T.List[str]:
+ return self._apply_prefix(['-z', 'ignore'])
+
+ def no_undefined_args(self) -> T.List[str]:
+ return ['-z', 'defs']
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return ['-z', 'nodefs']
+
+ def fatal_warnings(self) -> T.List[str]:
+ return ['-z', 'fatal-warnings']
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ if not rpath_paths and not install_rpath and not build_rpath:
+ return ([], set())
+ processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
+ all_paths = mesonlib.OrderedSet([os.path.join('$ORIGIN', p) for p in processed_rpaths])
+ rpath_dirs_to_remove = set()
+ for p in all_paths:
+ rpath_dirs_to_remove.add(p.encode('utf8'))
+ if build_rpath != '':
+ all_paths.add(build_rpath)
+ for p in build_rpath.split(':'):
+ rpath_dirs_to_remove.add(p.encode('utf8'))
+
+ # In order to avoid relinking for RPATH removal, the binary needs to contain just
+ # enough space in the ELF header to hold the final installation RPATH.
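+        # Illustrative example: paths='$ORIGIN/sub' with a 20-character
+        # install_rpath is padded to '$ORIGIN/sub:XXXXXXXXX', keeping the
+        # build-time entry at least as long as the final install RPATH.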
+ paths = ':'.join(all_paths)
+ if len(paths) < len(install_rpath):
+ padding = 'X' * (len(install_rpath) - len(paths))
+ if not paths:
+ paths = padding
+ else:
+ paths = paths + ':' + padding
+ return (self._apply_prefix(f'-rpath,{paths}'), rpath_dirs_to_remove)
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ sostr = '' if soversion is None else '.' + soversion
+ return self._apply_prefix(f'-soname,{prefix}{shlib_name}.{suffix}{sostr}')
+
+
+class AIXDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+ """Sys-V derived linker used on AIX"""
+
+ id = 'ld.aix'
+
+ def get_always_args(self) -> T.List[str]:
+ return self._apply_prefix(['-bnoipath', '-bbigtoc']) + super().get_always_args()
+
+ def no_undefined_args(self) -> T.List[str]:
+ return self._apply_prefix(['-bernotok'])
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return self._apply_prefix(['-berok'])
+
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ all_paths = mesonlib.OrderedSet() # type: mesonlib.OrderedSet[str]
+ # install_rpath first, followed by other paths, and the system path last
+ if install_rpath != '':
+ all_paths.add(install_rpath)
+ if build_rpath != '':
+ all_paths.add(build_rpath)
+ for p in rpath_paths:
+ all_paths.add(os.path.join(build_dir, p))
+ # We should consider allowing the $LIBPATH environment variable
+ # to override sys_path.
+ sys_path = env.get_compiler_system_dirs(self.for_machine)
+ if len(sys_path) == 0:
+ # get_compiler_system_dirs doesn't support our compiler.
+ # Use the default system library path
+ all_paths.update(['/usr/lib','/lib'])
+ else:
+ # Include the compiler's default library paths, but filter out paths that don't exist
+ for p in sys_path:
+ if os.path.isdir(p):
+ all_paths.add(p)
+ return (self._apply_prefix('-blibpath:' + ':'.join(all_paths)), set())
+
+ def thread_flags(self, env: 'Environment') -> T.List[str]:
+ return ['-pthread']
+
+
+class OptlinkDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
+
+ """Digital Mars dynamic linker for windows."""
+
+ id = 'optlink'
+
+ def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
+ *, version: str = 'unknown version'):
+        # Use optlink instead of link so we don't interfere with other link.exe
+ # implementations.
+ super().__init__(exelist, for_machine, '', [], version=version)
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return []
+
+ def get_debugfile_args(self, targetfile: str) -> T.List[str]:
+ # Optlink does not generate pdb files.
+ return []
+
+ def get_always_args(self) -> T.List[str]:
+ return []
+
+
+class CudaLinker(PosixDynamicLinkerMixin, DynamicLinker):
+ """Cuda linker (nvlink)"""
+
+ id = 'nvlink'
+
+ @staticmethod
+ def parse_version() -> str:
+ version_cmd = ['nvlink', '--version']
+ try:
+ _, out, _ = mesonlib.Popen_safe(version_cmd)
+ except OSError:
+ return 'unknown version'
+ # Output example:
+ # nvlink: NVIDIA (R) Cuda linker
+ # Copyright (c) 2005-2018 NVIDIA Corporation
+ # Built on Sun_Sep_30_21:09:22_CDT_2018
+ # Cuda compilation tools, release 10.0, V10.0.166
+        # We want the trailing version number, which conveniently follows the last 'V'.
+ return out.strip().split('V')[-1]
+
+ def get_accepts_rsp(self) -> bool:
+ # nvcc does not support response files
+ return False
+
+ def get_lib_prefix(self) -> str:
+ if not mesonlib.is_windows():
+ return ''
+ # nvcc doesn't recognize Meson's default .a extension for static libraries on
+ # Windows and passes it to cl as an object file, resulting in 'warning D9024 :
+ # unrecognized source file type 'xxx.a', object file assumed'.
+ #
+ # nvcc's --library= option doesn't help: it takes the library name without the
+ # extension and assumes that the extension on Windows is .lib; prefixing the
+ # library with -Xlinker= seems to work.
+ from ..compilers import CudaCompiler
+ return CudaCompiler.LINKER_PREFIX
+
+ def fatal_warnings(self) -> T.List[str]:
+ return ['--warning-as-error']
+
+ def get_allow_undefined_args(self) -> T.List[str]:
+ return []
+
+ def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+ suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
+ is_shared_module: bool) -> T.List[str]:
+ return []
diff --git a/meson/mesonbuild/mcompile.py b/meson/mesonbuild/mcompile.py
new file mode 100644
index 000000000..bb7ecae9b
--- /dev/null
+++ b/meson/mesonbuild/mcompile.py
@@ -0,0 +1,358 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Entrypoint script for backend agnostic compile."""
+
+import os
+import json
+import re
+import sys
+import shutil
+import typing as T
+from collections import defaultdict
+from pathlib import Path
+
+from . import mlog
+from . import mesonlib
+from . import coredata
+from .mesonlib import MesonException
+from mesonbuild.environment import detect_ninja
+from mesonbuild.coredata import UserArrayOption
+
+if T.TYPE_CHECKING:
+ import argparse
+
+def array_arg(value: str) -> T.List[str]:
+ return UserArrayOption(None, value, allow_dups=True, user_input=True).value
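+
+# For example (illustrative): array_arg('-a,-b') -> ['-a', '-b']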
+
+def validate_builddir(builddir: Path) -> None:
+ if not (builddir / 'meson-private' / 'coredata.dat' ).is_file():
+ raise MesonException(f'Current directory is not a meson build directory: `{builddir}`.\n'
+ 'Please specify a valid build dir or change the working directory to it.\n'
+ 'It is also possible that the build directory was generated with an old\n'
+ 'meson version. Please regenerate it in this case.')
+
+def get_backend_from_coredata(builddir: Path) -> str:
+ """
+ Gets `backend` option value from coredata
+ """
+ backend = coredata.load(str(builddir)).get_option(mesonlib.OptionKey('backend'))
+ assert isinstance(backend, str)
+ return backend
+
+def parse_introspect_data(builddir: Path) -> T.Dict[str, T.List[dict]]:
+ """
+    Converts a list of target dicts into a dict mapping each name to its list of target dicts (names are not unique)
+ """
+ path_to_intro = builddir / 'meson-info' / 'intro-targets.json'
+ if not path_to_intro.exists():
+ raise MesonException(f'`{path_to_intro.name}` is missing! Directory is not configured yet?')
+ with path_to_intro.open(encoding='utf-8') as f:
+ schema = json.load(f)
+
+ parsed_data = defaultdict(list) # type: T.Dict[str, T.List[dict]]
+ for target in schema:
+ parsed_data[target['name']] += [target]
+ return parsed_data
+
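+# Illustrative shape of the result: two targets sharing the name 'foo' become
+# {'foo': [{'name': 'foo', 'type': 'executable', ...},
+#          {'name': 'foo', 'type': 'custom', ...}]}
+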
+class ParsedTargetName:
+ full_name = ''
+ name = ''
+ type = ''
+ path = ''
+
+ def __init__(self, target: str):
+ self.full_name = target
+ split = target.rsplit(':', 1)
+ if len(split) > 1:
+ self.type = split[1]
+ if not self._is_valid_type(self.type):
+ raise MesonException(f'Can\'t invoke target `{target}`: unknown target type: `{self.type}`')
+
+ split = split[0].rsplit('/', 1)
+ if len(split) > 1:
+ self.path = split[0]
+ self.name = split[1]
+ else:
+ self.name = split[0]
+
+ @staticmethod
+ def _is_valid_type(type: str) -> bool:
+ # Amend docs in Commands.md when editing this list
+ allowed_types = {
+ 'executable',
+ 'static_library',
+ 'shared_library',
+ 'shared_module',
+ 'custom',
+ 'run',
+ 'jar',
+ }
+ return type in allowed_types
+
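+# Example (illustrative): ParsedTargetName('sub/dir/foo:shared_library') yields
+# name='foo', path='sub/dir' and type='shared_library'; a bare
+# ParsedTargetName('foo') leaves path and type empty.
+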
+def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introspect_data: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]:
+ if target.name not in introspect_data:
+ raise MesonException(f'Can\'t invoke target `{target.full_name}`: target not found')
+
+ intro_targets = introspect_data[target.name]
+ found_targets = [] # type: T.List[T.Dict[str, T.Any]]
+
+ resolved_bdir = builddir.resolve()
+
+ if not target.type and not target.path:
+ found_targets = intro_targets
+ else:
+ for intro_target in intro_targets:
+ if (intro_target['subproject'] or
+ (target.type and target.type != intro_target['type'].replace(' ', '_')) or
+ (target.path
+ and intro_target['filename'] != 'no_name'
+ and Path(target.path) != Path(intro_target['filename'][0]).relative_to(resolved_bdir).parent)):
+ continue
+ found_targets += [intro_target]
+
+ if not found_targets:
+ raise MesonException(f'Can\'t invoke target `{target.full_name}`: target not found')
+ elif len(found_targets) > 1:
+        raise MesonException(f'Can\'t invoke target `{target.full_name}`: ambiguous name. Add target type and/or path: `PATH/NAME:TYPE`')
+
+ return found_targets[0]
+
+def generate_target_names_ninja(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> T.List[str]:
+ intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+ if intro_target['type'] == 'run':
+ return [target.name]
+ else:
+ return [str(Path(out_file).relative_to(builddir.resolve())) for out_file in intro_target['filename']]
+
+def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path) -> T.Tuple[T.List[str], T.Optional[T.Dict[str, str]]]:
+ runner = detect_ninja()
+ if runner is None:
+ raise MesonException('Cannot find ninja.')
+
+ cmd = runner
+ if not builddir.samefile('.'):
+ cmd.extend(['-C', builddir.as_posix()])
+
+    # If the value is set to < 1 then don't set anything, which lets
+ # ninja/samu decide what to do.
+ if options.jobs > 0:
+ cmd.extend(['-j', str(options.jobs)])
+ if options.load_average > 0:
+ cmd.extend(['-l', str(options.load_average)])
+
+ if options.verbose:
+ cmd.append('-v')
+
+ cmd += options.ninja_args
+
+ # operands must be processed after options/option-arguments
+ if options.targets:
+ intro_data = parse_introspect_data(builddir)
+ for t in options.targets:
+ cmd.extend(generate_target_names_ninja(ParsedTargetName(t), builddir, intro_data))
+ if options.clean:
+ cmd.append('clean')
+
+ return cmd, None
+
+def generate_target_name_vs(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> str:
+ intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+    assert intro_target['type'] != 'run', 'Should not reach here: `run` targets must be handled above'
+
+ # Normalize project name
+ # Source: https://docs.microsoft.com/en-us/visualstudio/msbuild/how-to-build-specific-targets-in-solutions-by-using-msbuild-exe
+ target_name = re.sub(r"[\%\$\@\;\.\(\)']", '_', intro_target['id']) # type: str
+ rel_path = Path(intro_target['filename'][0]).relative_to(builddir.resolve()).parent
+ if rel_path != Path('.'):
+ target_name = str(rel_path / target_name)
+ return target_name
+
+def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path) -> T.Tuple[T.List[str], T.Optional[T.Dict[str, str]]]:
+ slns = list(builddir.glob('*.sln'))
+ assert len(slns) == 1, 'More than one solution in a project?'
+ sln = slns[0]
+
+ cmd = ['msbuild']
+
+ if options.targets:
+ intro_data = parse_introspect_data(builddir)
+ has_run_target = any(map(
+ lambda t:
+ get_target_from_intro_data(ParsedTargetName(t), builddir, intro_data)['type'] == 'run',
+ options.targets
+ ))
+
+ if has_run_target:
+            # `run` targets can't be used the same way as other targets on the
+            # `vs` backend. They are defined as disabled projects, which can't
+            # be invoked via the `.sln` file and have to be built directly as
+            # projects instead.
+ # Issue: https://github.com/microsoft/msbuild/issues/4772
+
+ if len(options.targets) > 1:
+ raise MesonException('Only one target may be specified when `run` target type is used on this backend.')
+ intro_target = get_target_from_intro_data(ParsedTargetName(options.targets[0]), builddir, intro_data)
+ proj_dir = Path(intro_target['filename'][0]).parent
+ proj = proj_dir/'{}.vcxproj'.format(intro_target['id'])
+ cmd += [str(proj.resolve())]
+ else:
+ cmd += [str(sln.resolve())]
+ cmd.extend(['-target:{}'.format(generate_target_name_vs(ParsedTargetName(t), builddir, intro_data)) for t in options.targets])
+ else:
+ cmd += [str(sln.resolve())]
+
+ if options.clean:
+ cmd.extend(['-target:Clean'])
+
+    # In msbuild, `-maxCpuCount` with no number means "detect CPUs"; the default is `-maxCpuCount:1`
+ if options.jobs > 0:
+ cmd.append(f'-maxCpuCount:{options.jobs}')
+ else:
+ cmd.append('-maxCpuCount')
+
+ if options.load_average:
+ mlog.warning('Msbuild does not have a load-average switch, ignoring.')
+
+ if not options.verbose:
+ cmd.append('-verbosity:minimal')
+
+ cmd += options.vs_args
+
+    # Remove PLATFORM from the environment so that msbuild does not pick the
+    # x86 platform when the solution platform is Win32; pop safely since the
+    # variable may not be set.
+    env = os.environ.copy()
+    env.pop('PLATFORM', None)
+
+ return cmd, env
+
+def get_parsed_args_xcode(options: 'argparse.Namespace', builddir: Path) -> T.Tuple[T.List[str], T.Optional[T.Dict[str, str]]]:
+ runner = 'xcodebuild'
+ if not shutil.which(runner):
+        raise MesonException('Cannot find xcodebuild, did you install Xcode?')
+
+ # No argument to switch directory
+ os.chdir(str(builddir))
+
+ cmd = [runner, '-parallelizeTargets']
+
+ if options.targets:
+ for t in options.targets:
+ cmd += ['-target', t]
+
+ if options.clean:
+ if options.targets:
+ cmd += ['clean']
+ else:
+ cmd += ['-alltargets', 'clean']
+ # Otherwise xcodebuild tries to delete the builddir and fails
+ cmd += ['-UseNewBuildSystem=FALSE']
+
+ if options.jobs > 0:
+ cmd.extend(['-jobs', str(options.jobs)])
+
+ if options.load_average > 0:
+ mlog.warning('xcodebuild does not have a load-average switch, ignoring')
+
+ if options.verbose:
+ # xcodebuild is already quite verbose, and -quiet doesn't print any
+ # status messages
+ pass
+
+ cmd += options.xcode_args
+ return cmd, None
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+ """Add compile specific arguments."""
+ parser.add_argument(
+ 'targets',
+ metavar='TARGET',
+ nargs='*',
+ default=None,
+ help='Targets to build. Target has the following format: [PATH_TO_TARGET/]TARGET_NAME[:TARGET_TYPE].')
+ parser.add_argument(
+ '--clean',
+ action='store_true',
+ help='Clean the build directory.'
+ )
+ parser.add_argument(
+ '-C',
+ action='store',
+ dest='builddir',
+ type=Path,
+ default='.',
+ help='The directory containing build files to be built.'
+ )
+ parser.add_argument(
+ '-j', '--jobs',
+ action='store',
+ default=0,
+ type=int,
+ help='The number of worker jobs to run (if supported). If the value is less than 1 the build program will guess.'
+ )
+ parser.add_argument(
+ '-l', '--load-average',
+ action='store',
+ default=0,
+ type=int,
+ help='The system load average to try to maintain (if supported).'
+ )
+ parser.add_argument(
+ '-v', '--verbose',
+ action='store_true',
+ help='Show more verbose output.'
+ )
+ parser.add_argument(
+ '--ninja-args',
+ type=array_arg,
+ default=[],
+ help='Arguments to pass to `ninja` (applied only on `ninja` backend).'
+ )
+ parser.add_argument(
+ '--vs-args',
+ type=array_arg,
+ default=[],
+ help='Arguments to pass to `msbuild` (applied only on `vs` backend).'
+ )
+ parser.add_argument(
+ '--xcode-args',
+ type=array_arg,
+ default=[],
+ help='Arguments to pass to `xcodebuild` (applied only on `xcode` backend).'
+ )
+
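+# Typical invocations (illustrative, not from upstream docs):
+#   meson compile -C builddir                     # build everything
+#   meson compile -C builddir foo:executable -j 4 # one target, four jobs
+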
+def run(options: 'argparse.Namespace') -> int:
+ bdir = options.builddir # type: Path
+ validate_builddir(bdir.resolve())
+
+ cmd = [] # type: T.List[str]
+ env = None # type: T.Optional[T.Dict[str, str]]
+
+ if options.targets and options.clean:
+ raise MesonException('`TARGET` and `--clean` can\'t be used simultaneously')
+
+ backend = get_backend_from_coredata(bdir)
+ if backend == 'ninja':
+ cmd, env = get_parsed_args_ninja(options, bdir)
+ elif backend.startswith('vs'):
+ cmd, env = get_parsed_args_vs(options, bdir)
+ elif backend == 'xcode':
+ cmd, env = get_parsed_args_xcode(options, bdir)
+ else:
+ raise MesonException(
+ f'Backend `{backend}` is not yet supported by `compile`. Use generated project files directly instead.')
+
+ p, *_ = mesonlib.Popen_safe(cmd, stdout=sys.stdout.buffer, stderr=sys.stderr.buffer, env=env)
+
+ return p.returncode
diff --git a/meson/mesonbuild/mconf.py b/meson/mesonbuild/mconf.py
new file mode 100644
index 000000000..4b3f33179
--- /dev/null
+++ b/meson/mesonbuild/mconf.py
@@ -0,0 +1,334 @@
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+import shutil
+import os
+import textwrap
+import typing as T
+
+from . import build
+from . import coredata
+from . import environment
+from . import mesonlib
+from . import mintro
+from . import mlog
+from .ast import AstIDGenerator
+from .mesonlib import MachineChoice, OptionKey
+
+if T.TYPE_CHECKING:
+ import argparse
+ from .coredata import UserOption
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+ coredata.register_builtin_arguments(parser)
+ parser.add_argument('builddir', nargs='?', default='.')
+ parser.add_argument('--clearcache', action='store_true', default=False,
+ help='Clear cached state (e.g. found dependencies)')
+
+def make_lower_case(val: T.Any) -> T.Union[str, T.List[T.Any]]: # T.Any because of recursion...
+ if isinstance(val, bool):
+ return str(val).lower()
+ elif isinstance(val, list):
+ return [make_lower_case(i) for i in val]
+ else:
+ return str(val)
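+
+# For example (illustrative): make_lower_case(True) -> 'true' and
+# make_lower_case([True, 1]) -> ['true', '1']; non-bool values are merely
+# stringified, not lower-cased.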
+
+
+class ConfException(mesonlib.MesonException):
+ pass
+
+
+class Conf:
+ def __init__(self, build_dir):
+ self.build_dir = os.path.abspath(os.path.realpath(build_dir))
+ if 'meson.build' in [os.path.basename(self.build_dir), self.build_dir]:
+ self.build_dir = os.path.dirname(self.build_dir)
+ self.build = None
+ self.max_choices_line_length = 60
+ self.name_col = []
+ self.value_col = []
+ self.choices_col = []
+ self.descr_col = []
+ # XXX: is there a case where this can actually remain false?
+ self.has_choices = False
+ self.all_subprojects: T.Set[str] = set()
+ self.yielding_options: T.Set[OptionKey] = set()
+
+ if os.path.isdir(os.path.join(self.build_dir, 'meson-private')):
+ self.build = build.load(self.build_dir)
+ self.source_dir = self.build.environment.get_source_dir()
+ self.coredata = coredata.load(self.build_dir)
+ self.default_values_only = False
+ elif os.path.isfile(os.path.join(self.build_dir, environment.build_filename)):
+ # Make sure that log entries in other parts of meson don't interfere with the JSON output
+ mlog.disable()
+ self.source_dir = os.path.abspath(os.path.realpath(self.build_dir))
+ intr = mintro.IntrospectionInterpreter(self.source_dir, '', 'ninja', visitors = [AstIDGenerator()])
+ intr.analyze()
+ # Re-enable logging just in case
+ mlog.enable()
+ self.coredata = intr.coredata
+ self.default_values_only = True
+ else:
+ raise ConfException(f'Directory {build_dir} is neither a Meson build directory nor a project source directory.')
+
+ def clear_cache(self):
+ self.coredata.clear_deps_cache()
+
+ def set_options(self, options):
+ self.coredata.set_options(options)
+
+ def save(self):
+ # Do nothing when using introspection
+ if self.default_values_only:
+ return
+ # Only called if something has changed so overwrite unconditionally.
+ coredata.save(self.coredata, self.build_dir)
+ # We don't write the build file because any changes to it
+ # are erased when Meson is executed the next time, i.e. when
+ # Ninja is run.
+
+ def print_aligned(self) -> None:
+ """Do the actual printing.
+
+        This prints the generated output in an aligned, pretty form. It aims
+        for a total width of 160 characters, but will use whatever width the
+        tty reports. Though this is much wider than the traditional 80
+        characters of terminals, and even the newer 120, compressing the
+        output to those lengths makes it hard to read.
+
+ Each column will have a specific width, and will be line wrapped.
+ """
+ total_width = shutil.get_terminal_size(fallback=(160, 0))[0]
+ _col = max(total_width // 5, 20)
+ four_column = (_col, _col, _col, total_width - (3 * _col))
+ # In this case we don't have the choices field, so we can redistribute
+ # the extra 40 characters to val and desc
+ three_column = (_col, _col * 2, total_width // 2)
+
+ for line in zip(self.name_col, self.value_col, self.choices_col, self.descr_col):
+ if not any(line):
+ print('')
+ continue
+
+ # This is a header, like `Subproject foo:`,
+ # We just want to print that and get on with it
+ if line[0] and not any(line[1:]):
+ print(line[0])
+ continue
+
+            # wrap will take a long string and create a list of strings no
+            # longer than the size given. Those lists can then be zipped
+            # together to print the output row by row, with each column
+            # padded to its assigned width.
+ if self.has_choices:
+ name = textwrap.wrap(line[0], four_column[0])
+ val = textwrap.wrap(line[1], four_column[1])
+ choice = textwrap.wrap(line[2], four_column[2])
+ desc = textwrap.wrap(line[3], four_column[3])
+ for l in itertools.zip_longest(name, val, choice, desc, fillvalue=''):
+ # We must use the length modifier here to get even rows, as
+ # `textwrap.wrap` will only shorten, not lengthen each item
+ print('{:{widths[0]}} {:{widths[1]}} {:{widths[2]}} {}'.format(*l, widths=four_column))
+ else:
+ name = textwrap.wrap(line[0], three_column[0])
+ val = textwrap.wrap(line[1], three_column[1])
+ desc = textwrap.wrap(line[3], three_column[2])
+ for l in itertools.zip_longest(name, val, desc, fillvalue=''):
+ print('{:{widths[0]}} {:{widths[1]}} {}'.format(*l, widths=three_column))
+
+ def split_options_per_subproject(self, options: 'coredata.KeyedOptionDictType') -> T.Dict[str, T.Dict[str, 'UserOption']]:
+ result: T.Dict[str, T.Dict[str, 'UserOption']] = {}
+ for k, o in options.items():
+ subproject = k.subproject
+ if k.subproject:
+ k = k.as_root()
+ if o.yielding and k in options:
+ self.yielding_options.add(k)
+ self.all_subprojects.add(subproject)
+ result.setdefault(subproject, {})[str(k)] = o
+ return result
+
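+    # Illustrative shape of split_options_per_subproject's result:
+    # {'': {'warning_level': <UserOption>, ...},
+    #  'sub': {'warning_level': <UserOption>, ...}}
+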
+ def _add_line(self, name: OptionKey, value, choices, descr) -> None:
+ self.name_col.append(' ' * self.print_margin + str(name))
+ self.value_col.append(value)
+ self.choices_col.append(choices)
+ self.descr_col.append(descr)
+
+ def add_option(self, name, descr, value, choices):
+ if isinstance(value, list):
+ value = '[{}]'.format(', '.join(make_lower_case(value)))
+ else:
+ value = make_lower_case(value)
+
+ if choices:
+ self.has_choices = True
+ if isinstance(choices, list):
+ choices_list = make_lower_case(choices)
+ current = '['
+ while choices_list:
+ i = choices_list.pop(0)
+ if len(current) + len(i) >= self.max_choices_line_length:
+ self._add_line(name, value, current + ',', descr)
+ name = ''
+ value = ''
+ descr = ''
+ current = ' '
+ if len(current) > 1:
+ current += ', '
+ current += i
+ choices = current + ']'
+ else:
+ choices = make_lower_case(choices)
+ else:
+ choices = ''
+
+ self._add_line(name, value, choices, descr)
+
+ def add_title(self, title):
+ titles = {'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'}
+ if self.default_values_only:
+ titles['value'] = 'Default Value'
+ self._add_line('', '', '', '')
+ self._add_line(title, titles['value'], titles['choices'], titles['descr'])
+ self._add_line('-' * len(title), '-' * len(titles['value']), '-' * len(titles['choices']), '-' * len(titles['descr']))
+
+ def add_section(self, section):
+ self.print_margin = 0
+ self._add_line('', '', '', '')
+ self._add_line(section + ':', '', '', '')
+ self.print_margin = 2
+
+ def print_options(self, title: str, options: 'coredata.KeyedOptionDictType') -> None:
+ if not options:
+ return
+ if title:
+ self.add_title(title)
+ for k, o in sorted(options.items()):
+ printable_value = o.printable_value()
+ if k in self.yielding_options:
+ printable_value = '<inherited from main project>'
+ self.add_option(k, o.description, printable_value, o.choices)
+
+ def print_conf(self):
+ def print_default_values_warning():
+ mlog.warning('The source directory instead of the build directory was specified.')
+ mlog.warning('Only the default values for the project are printed, and all command line parameters are ignored.')
+
+ if self.default_values_only:
+ print_default_values_warning()
+ print('')
+
+ print('Core properties:')
+ print(' Source dir', self.source_dir)
+ if not self.default_values_only:
+ print(' Build dir ', self.build_dir)
+
+ dir_option_names = set(coredata.BUILTIN_DIR_OPTIONS)
+ test_option_names = {OptionKey('errorlogs'),
+ OptionKey('stdsplit')}
+
+ dir_options: 'coredata.KeyedOptionDictType' = {}
+ test_options: 'coredata.KeyedOptionDictType' = {}
+ core_options: 'coredata.KeyedOptionDictType' = {}
+ for k, v in self.coredata.options.items():
+ if k in dir_option_names:
+ dir_options[k] = v
+ elif k in test_option_names:
+ test_options[k] = v
+ elif k.is_builtin():
+ core_options[k] = v
+
+ host_core_options = self.split_options_per_subproject({k: v for k, v in core_options.items() if k.machine is MachineChoice.HOST})
+ build_core_options = self.split_options_per_subproject({k: v for k, v in core_options.items() if k.machine is MachineChoice.BUILD})
+ host_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.options.items() if k.is_compiler() and k.machine is MachineChoice.HOST})
+ build_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.options.items() if k.is_compiler() and k.machine is MachineChoice.BUILD})
+ project_options = self.split_options_per_subproject({k: v for k, v in self.coredata.options.items() if k.is_project()})
+ show_build_options = self.default_values_only or self.build.environment.is_cross_build()
+
+ self.add_section('Main project options')
+ self.print_options('Core options', host_core_options[''])
+ if show_build_options:
+ self.print_options('', build_core_options[''])
+ self.print_options('Backend options', {str(k): v for k, v in self.coredata.options.items() if k.is_backend()})
+ self.print_options('Base options', {str(k): v for k, v in self.coredata.options.items() if k.is_base()})
+ self.print_options('Compiler options', host_compiler_options.get('', {}))
+ if show_build_options:
+ self.print_options('', build_compiler_options.get('', {}))
+ self.print_options('Directories', dir_options)
+ self.print_options('Testing options', test_options)
+ self.print_options('Project options', project_options.get('', {}))
+ for subproject in sorted(self.all_subprojects):
+ if subproject == '':
+ continue
+ self.add_section('Subproject ' + subproject)
+ if subproject in host_core_options:
+ self.print_options('Core options', host_core_options[subproject])
+ if subproject in build_core_options and show_build_options:
+ self.print_options('', build_core_options[subproject])
+ if subproject in host_compiler_options:
+ self.print_options('Compiler options', host_compiler_options[subproject])
+ if subproject in build_compiler_options and show_build_options:
+ self.print_options('', build_compiler_options[subproject])
+ if subproject in project_options:
+ self.print_options('Project options', project_options[subproject])
+ self.print_aligned()
+
+        # Print the warning twice so that the user cannot miss it
+ if self.default_values_only:
+ print('')
+ print_default_values_warning()
+
+ self.print_nondefault_buildtype_options()
+
+ def print_nondefault_buildtype_options(self):
+ mismatching = self.coredata.get_nondefault_buildtype_args()
+ if not mismatching:
+ return
+ print("\nThe following option(s) have a different value than the build type default\n")
+        print('{:21}{:10}{:10}'.format('', 'current', 'default'))
+ for m in mismatching:
+ print(f'{m[0]:21}{m[1]:10}{m[2]:10}')
+
+def run(options):
+ coredata.parse_cmd_line_options(options)
+ builddir = os.path.abspath(os.path.realpath(options.builddir))
+ c = None
+ try:
+ c = Conf(builddir)
+ if c.default_values_only:
+ c.print_conf()
+ return 0
+
+ save = False
+ if options.cmd_line_options:
+ c.set_options(options.cmd_line_options)
+ coredata.update_cmd_line_file(builddir, options)
+ save = True
+ elif options.clearcache:
+ c.clear_cache()
+ save = True
+ else:
+ c.print_conf()
+ if save:
+ c.save()
+ mintro.update_build_options(c.coredata, c.build.environment.info_dir)
+ mintro.write_meson_info_file(c.build, [])
+ except ConfException as e:
+ print('Meson configurator encountered an error:')
+ if c is not None and c.build is not None:
+ mintro.write_meson_info_file(c.build, [e])
+ raise e
+ return 0
diff --git a/meson/mesonbuild/mdevenv.py b/meson/mesonbuild/mdevenv.py
new file mode 100644
index 000000000..c304cbb52
--- /dev/null
+++ b/meson/mesonbuild/mdevenv.py
@@ -0,0 +1,78 @@
+import os, subprocess
+import argparse
+import tempfile
+
+from pathlib import Path
+from . import build
+from .mesonlib import MesonException, RealPathAction, is_windows
+
+import typing as T
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+ parser.add_argument('-C', dest='wd', action=RealPathAction,
+ help='directory to cd into before running')
+ parser.add_argument('command', nargs=argparse.REMAINDER,
+ help='Command to run in developer environment (default: interactive shell)')
+
+def get_windows_shell() -> str:
+ mesonbuild = Path(__file__).parent
+ script = mesonbuild / 'scripts' / 'cmd_or_ps.ps1'
+ command = ['powershell.exe', '-noprofile', '-executionpolicy', 'bypass', '-file', str(script)]
+ result = subprocess.check_output(command)
+ return result.decode().strip()
+
+def get_env(b: build.Build, build_dir: str) -> T.Dict[str, str]:
+ env = os.environ.copy()
+ for i in b.devenv:
+ env = i.get_env(env)
+
+ extra_env = build.EnvironmentVariables()
+ extra_env.set('MESON_DEVENV', ['1'])
+ extra_env.set('MESON_PROJECT_NAME', [b.project_name])
+
+ meson_uninstalled = Path(build_dir) / 'meson-uninstalled'
+ if meson_uninstalled.is_dir():
+ extra_env.prepend('PKG_CONFIG_PATH', [str(meson_uninstalled)])
+
+ return extra_env.get_env(env)
+
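+# Illustrative effect: the returned mapping is os.environ (plus any project
+# devenv entries) with MESON_DEVENV=1 and MESON_PROJECT_NAME=<project name>
+# added, and <builddir>/meson-uninstalled prepended to PKG_CONFIG_PATH when
+# that directory exists.
+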
+def run(options: argparse.Namespace) -> int:
+ buildfile = Path(options.wd) / 'meson-private' / 'build.dat'
+ if not buildfile.is_file():
+ raise MesonException(f'Directory {options.wd!r} does not seem to be a Meson build directory.')
+ b = build.load(options.wd)
+
+ devenv = get_env(b, options.wd)
+
+ args = options.command
+ if not args:
+ prompt_prefix = f'[{b.project_name}]'
+ if is_windows():
+ shell = get_windows_shell()
+ if shell == 'powershell.exe':
+ args = ['powershell.exe']
+ args += ['-NoLogo', '-NoExit']
+ prompt = f'function global:prompt {{ "{prompt_prefix} PS " + $PWD + "> "}}'
+ args += ['-Command', prompt]
+ else:
+ args = [os.environ.get("COMSPEC", r"C:\WINDOWS\system32\cmd.exe")]
+ args += ['/k', f'prompt {prompt_prefix} $P$G']
+ else:
+ args = [os.environ.get("SHELL", os.path.realpath("/bin/sh"))]
+ if "bash" in args[0] and not os.environ.get("MESON_DISABLE_PS1_OVERRIDE"):
+ tmprc = tempfile.NamedTemporaryFile(mode='w')
+ bashrc = os.path.expanduser('~/.bashrc')
+ if os.path.exists(bashrc):
+ tmprc.write(f'. {bashrc}\n')
+ tmprc.write(f'export PS1="{prompt_prefix} $PS1"')
+ tmprc.flush()
+ # Let the GC remove the tmp file
+ args.append("--rcfile")
+ args.append(tmprc.name)
+
+ try:
+ return subprocess.call(args, close_fds=False,
+ env=devenv,
+ cwd=options.wd)
+ except subprocess.CalledProcessError as e:
+ return e.returncode
diff --git a/meson/mesonbuild/mdist.py b/meson/mesonbuild/mdist.py
new file mode 100644
index 000000000..afa1b4c56
--- /dev/null
+++ b/meson/mesonbuild/mdist.py
@@ -0,0 +1,319 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import gzip
+import os
+import sys
+import shutil
+import subprocess
+import hashlib
+import json
+from glob import glob
+from pathlib import Path
+from mesonbuild.environment import detect_ninja
+from mesonbuild.mesonlib import (MesonException, RealPathAction, quiet_git,
+ windows_proof_rmtree)
+from mesonbuild.wrap import wrap
+from mesonbuild import mlog, build
+from .scripts.meson_exe import run_exe
+
+archive_choices = ['gztar', 'xztar', 'zip']
+
+archive_extension = {'gztar': '.tar.gz',
+ 'xztar': '.tar.xz',
+ 'zip': '.zip'}
+
+def add_arguments(parser):
+ parser.add_argument('-C', dest='wd', action=RealPathAction,
+ help='directory to cd into before running')
+ parser.add_argument('--formats', default='xztar',
+ help='Comma separated list of archive types to create. Supports xztar (default), gztar, and zip.')
+ parser.add_argument('--include-subprojects', action='store_true',
+ help='Include source code of subprojects that have been used for the build.')
+ parser.add_argument('--no-tests', action='store_true',
+ help='Do not build and test generated packages.')
+
+
+def create_hash(fname):
+    hashname = fname + '.sha256sum'
+    m = hashlib.sha256()
+    with open(fname, 'rb') as f:
+        m.update(f.read())
+ with open(hashname, 'w', encoding='utf-8') as f:
+ # A space and an asterisk because that is the format defined by GNU coreutils
+ # and accepted by busybox and the Perl shasum tool.
+ f.write('{} *{}\n'.format(m.hexdigest(), os.path.basename(fname)))
+
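+# The hash file holds a single line in GNU coreutils format, e.g.
+# (illustrative): '<sha256 hex digest> *foo-1.0.tar.xz'.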
+
+def del_gitfiles(dirname):
+ gitfiles = ('.git', '.gitattributes', '.gitignore', '.gitmodules')
+ for f in glob(os.path.join(dirname, '.git*')):
+ if os.path.split(f)[1] in gitfiles:
+ if os.path.isdir(f) and not os.path.islink(f):
+ windows_proof_rmtree(f)
+ else:
+ os.unlink(f)
+
+def process_submodules(dirname):
+ module_file = os.path.join(dirname, '.gitmodules')
+ if not os.path.exists(module_file):
+ return
+ subprocess.check_call(['git', 'submodule', 'update', '--init', '--recursive'], cwd=dirname)
+ for line in open(module_file, encoding='utf-8'):
+ line = line.strip()
+ if '=' not in line:
+ continue
+ k, v = line.split('=', 1)
+ k = k.strip()
+ v = v.strip()
+ if k != 'path':
+ continue
+ del_gitfiles(os.path.join(dirname, v))
+
+
+def run_dist_scripts(src_root, bld_root, dist_root, dist_scripts, subprojects):
+ assert(os.path.isabs(dist_root))
+ env = {}
+ env['MESON_DIST_ROOT'] = dist_root
+ env['MESON_SOURCE_ROOT'] = src_root
+ env['MESON_BUILD_ROOT'] = bld_root
+ for d in dist_scripts:
+ if d.subproject and d.subproject not in subprojects:
+ continue
+ subdir = subprojects.get(d.subproject, '')
+ env['MESON_PROJECT_DIST_ROOT'] = os.path.join(dist_root, subdir)
+ env['MESON_PROJECT_SOURCE_ROOT'] = os.path.join(src_root, subdir)
+ env['MESON_PROJECT_BUILD_ROOT'] = os.path.join(bld_root, subdir)
+ name = ' '.join(d.cmd_args)
+ print(f'Running custom dist script {name!r}')
+ try:
+ rc = run_exe(d, env)
+ if rc != 0:
+ sys.exit('Dist script errored out')
+ except OSError:
+ print(f'Failed to run dist script {name!r}')
+ sys.exit(1)
+
+def git_root(src_root):
+    # Cannot use --show-toplevel here because git in our CI prints cygwin paths
+    # that python cannot resolve. Work around this by taking the parent of src_root.
+ prefix = quiet_git(['rev-parse', '--show-prefix'], src_root, check=True)[1].strip()
+ if not prefix:
+ return Path(src_root)
+ prefix_level = len(Path(prefix).parents)
+ return Path(src_root).parents[prefix_level - 1]
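+
+# Illustrative example: for src_root '/repo/sub' inside a git checkout rooted
+# at '/repo', `git rev-parse --show-prefix` prints 'sub/', so this returns
+# Path('/repo').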
+
+def is_git(src_root):
+ '''
+ Checks if meson.build file at the root source directory is tracked by git.
+    It could be a subproject that is part of the parent project's git repository.
+ '''
+ return quiet_git(['ls-files', '--error-unmatch', 'meson.build'], src_root)[0]
+
+def git_have_dirty_index(src_root):
+ '''Check whether there are uncommitted changes in git'''
+ ret = subprocess.call(['git', '-C', src_root, 'diff-index', '--quiet', 'HEAD'])
+ return ret == 1
+
+def git_clone(src_root, distdir):
+ if git_have_dirty_index(src_root):
+ mlog.warning('Repository has uncommitted changes that will not be included in the dist tarball')
+ if os.path.exists(distdir):
+ windows_proof_rmtree(distdir)
+ repo_root = git_root(src_root)
+ if repo_root.samefile(src_root):
+ os.makedirs(distdir)
+ subprocess.check_call(['git', 'clone', '--shared', src_root, distdir])
+ else:
+ subdir = Path(src_root).relative_to(repo_root)
+ tmp_distdir = distdir + '-tmp'
+ if os.path.exists(tmp_distdir):
+ windows_proof_rmtree(tmp_distdir)
+ os.makedirs(tmp_distdir)
+ subprocess.check_call(['git', 'clone', '--shared', '--no-checkout', str(repo_root), tmp_distdir])
+ subprocess.check_call(['git', 'checkout', 'HEAD', '--', str(subdir)], cwd=tmp_distdir)
+ Path(tmp_distdir, subdir).rename(distdir)
+ windows_proof_rmtree(tmp_distdir)
+ process_submodules(distdir)
+ del_gitfiles(distdir)
+
+def create_dist_git(dist_name, archives, src_root, bld_root, dist_sub, dist_scripts, subprojects):
+ distdir = os.path.join(dist_sub, dist_name)
+ git_clone(src_root, distdir)
+ for path in subprojects.values():
+ sub_src_root = os.path.join(src_root, path)
+ sub_distdir = os.path.join(distdir, path)
+ if os.path.exists(sub_distdir):
+ continue
+ if is_git(sub_src_root):
+ git_clone(sub_src_root, sub_distdir)
+ else:
+ shutil.copytree(sub_src_root, sub_distdir)
+ run_dist_scripts(src_root, bld_root, distdir, dist_scripts, subprojects)
+ output_names = []
+ for a in archives:
+ compressed_name = distdir + archive_extension[a]
+ shutil.make_archive(distdir, a, root_dir=dist_sub, base_dir=dist_name)
+ output_names.append(compressed_name)
+ windows_proof_rmtree(distdir)
+ return output_names
+
+def is_hg(src_root):
+ return os.path.isdir(os.path.join(src_root, '.hg'))
+
+def hg_have_dirty_index(src_root):
+ '''Check whether there are uncommitted changes in hg'''
+ out = subprocess.check_output(['hg', '-R', src_root, 'summary'])
+ return b'commit: (clean)' not in out
+
+def create_dist_hg(dist_name, archives, src_root, bld_root, dist_sub, dist_scripts):
+ if hg_have_dirty_index(src_root):
+ mlog.warning('Repository has uncommitted changes that will not be included in the dist tarball')
+ if dist_scripts:
+ mlog.warning('dist scripts are not supported in Mercurial projects')
+
+ os.makedirs(dist_sub, exist_ok=True)
+ tarname = os.path.join(dist_sub, dist_name + '.tar')
+ xzname = tarname + '.xz'
+ gzname = tarname + '.gz'
+ zipname = os.path.join(dist_sub, dist_name + '.zip')
+ # Note that -X interprets relative paths using the current working
+ # directory, not the repository root, so this must be an absolute path:
+ # https://bz.mercurial-scm.org/show_bug.cgi?id=6267
+ #
+ # .hg[a-z]* is used instead of .hg* to keep .hg_archival.txt, which may
+ # be useful to link the tarball to the Mercurial revision for either
+ # manual inspection or in case any code interprets it for a --version or
+ # similar.
+ subprocess.check_call(['hg', 'archive', '-R', src_root, '-S', '-t', 'tar',
+ '-X', src_root + '/.hg[a-z]*', tarname])
+ output_names = []
+ if 'xztar' in archives:
+ import lzma
+ with lzma.open(xzname, 'wb') as xf, open(tarname, 'rb') as tf:
+ shutil.copyfileobj(tf, xf)
+ output_names.append(xzname)
+ if 'gztar' in archives:
+ with gzip.open(gzname, 'wb') as zf, open(tarname, 'rb') as tf:
+ shutil.copyfileobj(tf, zf)
+ output_names.append(gzname)
+ os.unlink(tarname)
+ if 'zip' in archives:
+ subprocess.check_call(['hg', 'archive', '-R', src_root, '-S', '-t', 'zip', zipname])
+ output_names.append(zipname)
+ return output_names
+
+def run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_args):
+ if subprocess.call(meson_command + ['--backend=ninja', unpacked_src_dir, builddir]) != 0:
+ print('Running Meson on distribution package failed')
+ return 1
+ if subprocess.call(ninja_args, cwd=builddir) != 0:
+ print('Compiling the distribution package failed')
+ return 1
+ if subprocess.call(ninja_args + ['test'], cwd=builddir) != 0:
+ print('Running unit tests on the distribution package failed')
+ return 1
+ myenv = os.environ.copy()
+ myenv['DESTDIR'] = installdir
+ if subprocess.call(ninja_args + ['install'], cwd=builddir, env=myenv) != 0:
+ print('Installing the distribution package failed')
+ return 1
+ return 0
+
+def check_dist(packagename, meson_command, extra_meson_args, bld_root, privdir):
+ print(f'Testing distribution package {packagename}')
+ unpackdir = os.path.join(privdir, 'dist-unpack')
+ builddir = os.path.join(privdir, 'dist-build')
+ installdir = os.path.join(privdir, 'dist-install')
+ for p in (unpackdir, builddir, installdir):
+ if os.path.exists(p):
+ windows_proof_rmtree(p)
+ os.mkdir(p)
+ ninja_args = detect_ninja()
+ shutil.unpack_archive(packagename, unpackdir)
+ unpacked_files = glob(os.path.join(unpackdir, '*'))
+ assert(len(unpacked_files) == 1)
+ unpacked_src_dir = unpacked_files[0]
+ with open(os.path.join(bld_root, 'meson-info', 'intro-buildoptions.json'), encoding='utf-8') as boptions:
+ meson_command += ['-D{name}={value}'.format(**o) for o in json.load(boptions)
+ if o['name'] not in ['backend', 'install_umask', 'buildtype']]
+ meson_command += extra_meson_args
+
+ ret = run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_args)
+ if ret > 0:
+ print(f'Dist check build directory was {builddir}')
+ else:
+ windows_proof_rmtree(unpackdir)
+ windows_proof_rmtree(builddir)
+ windows_proof_rmtree(installdir)
+ print(f'Distribution package {packagename} tested')
+ return ret
+
+def determine_archives_to_generate(options):
+    result = []
+    for i in options.formats.split(','):
+        if i not in archive_choices:
+            sys.exit(f'Value "{i}" not one of permitted values {archive_choices}.')
+        result.append(i)
+    if len(result) == 0:
+        sys.exit('No archive types specified.')
+    return result
+
+def run(options):
+ buildfile = Path(options.wd) / 'meson-private' / 'build.dat'
+ if not buildfile.is_file():
+ raise MesonException(f'Directory {options.wd!r} does not seem to be a Meson build directory.')
+ b = build.load(options.wd)
+    # This import must be delayed until here, otherwise it would pick up the
+    # default value of None.
+ from mesonbuild.mesonlib import get_meson_command
+ src_root = b.environment.source_dir
+ bld_root = b.environment.build_dir
+ priv_dir = os.path.join(bld_root, 'meson-private')
+ dist_sub = os.path.join(bld_root, 'meson-dist')
+
+ dist_name = b.project_name + '-' + b.project_version
+
+ archives = determine_archives_to_generate(options)
+
+ subprojects = {}
+ extra_meson_args = []
+ if options.include_subprojects:
+ subproject_dir = os.path.join(src_root, b.subproject_dir)
+ for sub in b.subprojects:
+ directory = wrap.get_directory(subproject_dir, sub)
+ subprojects[sub] = os.path.join(b.subproject_dir, directory)
+ extra_meson_args.append('-Dwrap_mode=nodownload')
+
+ if is_git(src_root):
+ names = create_dist_git(dist_name, archives, src_root, bld_root, dist_sub, b.dist_scripts, subprojects)
+ elif is_hg(src_root):
+ if subprojects:
+ print('--include-subprojects option currently not supported with Mercurial')
+ return 1
+ names = create_dist_hg(dist_name, archives, src_root, bld_root, dist_sub, b.dist_scripts)
+ else:
+ print('Dist currently only works with Git or Mercurial repos')
+ return 1
+ if names is None:
+ return 1
+ rc = 0
+ if not options.no_tests:
+ # Check only one.
+ rc = check_dist(names[0], get_meson_command(), extra_meson_args, bld_root, priv_dir)
+ if rc == 0:
+ for name in names:
+ create_hash(name)
+ print('Created', name)
+ return rc
diff --git a/meson/mesonbuild/mesondata.py b/meson/mesonbuild/mesondata.py
new file mode 100644
index 000000000..43b7bde7a
--- /dev/null
+++ b/meson/mesonbuild/mesondata.py
@@ -0,0 +1,394 @@
+# Copyright 2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+####
+#### WARNING: This is an automatically generated file! Do not edit!
+#### Generated by tools/gen_data.py
+####
+
+
+# TODO: Remember to remove this also from tools/gen_data.py
+from pathlib import Path
+import typing as T
+
+if T.TYPE_CHECKING:
+ from .environment import Environment
+
+######################
+# BEGIN Data section #
+######################
+
+file_0_data_preload_cmake = '''\
+if(MESON_PS_LOADED)
+ return()
+endif()
+
+set(MESON_PS_LOADED ON)
+
+cmake_policy(PUSH)
+cmake_policy(SET CMP0054 NEW) # https://cmake.org/cmake/help/latest/policy/CMP0054.html
+
+# Dummy macros that have a special meaning in the meson code
+macro(meson_ps_execute_delayed_calls)
+endmacro()
+
+macro(meson_ps_reload_vars)
+endmacro()
+
+macro(meson_ps_disabled_function)
+ message(WARNING "The function '${ARGV0}' is disabled in the context of CMake subprojects.\n"
+ "This should not be an issue but may lead to compilation errors.")
+endmacro()
+
+# Helper macro to inspect the current CMake state
+macro(meson_ps_inspect_vars)
+ set(MESON_PS_CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}")
+ set(MESON_PS_CMAKE_CURRENT_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}")
+ meson_ps_execute_delayed_calls()
+endmacro()
+
+
+# Override some system functions with custom code and forward the args
+# to the original function
+macro(add_custom_command)
+ meson_ps_inspect_vars()
+ _add_custom_command(${ARGV})
+endmacro()
+
+macro(add_custom_target)
+ meson_ps_inspect_vars()
+ _add_custom_target(${ARGV})
+endmacro()
+
+macro(set_property)
+ meson_ps_inspect_vars()
+ _set_property(${ARGV})
+endmacro()
+
+function(set_source_files_properties)
+ set(FILES)
+ set(I 0)
+ set(PROPERTIES OFF)
+
+ while(I LESS ARGC)
+ if(NOT PROPERTIES)
+ if("${ARGV${I}}" STREQUAL "PROPERTIES")
+ set(PROPERTIES ON)
+ else()
+ list(APPEND FILES "${ARGV${I}}")
+ endif()
+
+ math(EXPR I "${I} + 1")
+ else()
+ set(ID_IDX ${I})
+ math(EXPR PROP_IDX "${ID_IDX} + 1")
+
+ set(ID "${ARGV${ID_IDX}}")
+ set(PROP "${ARGV${PROP_IDX}}")
+
+ set_property(SOURCE ${FILES} PROPERTY "${ID}" "${PROP}")
+ math(EXPR I "${I} + 2")
+ endif()
+ endwhile()
+endfunction()
+
+# Disable some functions that would mess up the CMake meson integration
+macro(target_precompile_headers)
+ meson_ps_disabled_function(target_precompile_headers)
+endmacro()
+
+set(MESON_PS_DELAYED_CALLS add_custom_command;add_custom_target;set_property)
+meson_ps_reload_vars()
+
+cmake_policy(POP)
+'''
+
+file_1_data_CMakeLists_txt = '''\
+# fail noisily if attempt to use this file without setting:
+# cmake_minimum_required(VERSION ${CMAKE_VERSION})
+# project(... LANGUAGES ...)
+
+cmake_policy(SET CMP0000 NEW)
+
+set(PACKAGE_FOUND FALSE)
+set(_packageName "${NAME}")
+string(TOUPPER "${_packageName}" PACKAGE_NAME)
+
+while(TRUE)
+ if ("${VERSION}" STREQUAL "")
+ find_package("${NAME}" QUIET COMPONENTS ${COMPS})
+ else()
+ find_package("${NAME}" "${VERSION}" QUIET COMPONENTS ${COMPS})
+ endif()
+
+ # ARCHS has to be set via the CMD interface
+ if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "")
+ break()
+ endif()
+
+ list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+ list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND)
+ set(PACKAGE_FOUND TRUE)
+
+ # Check the following variables:
+ # FOO_VERSION
+ # Foo_VERSION
+ # FOO_VERSION_STRING
+ # Foo_VERSION_STRING
+ if(NOT DEFINED PACKAGE_VERSION)
+ if(DEFINED ${_packageName}_VERSION)
+ set(PACKAGE_VERSION "${${_packageName}_VERSION}")
+ elseif(DEFINED ${PACKAGE_NAME}_VERSION)
+ set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION}")
+ elseif(DEFINED ${_packageName}_VERSION_STRING)
+ set(PACKAGE_VERSION "${${_packageName}_VERSION_STRING}")
+ elseif(DEFINED ${PACKAGE_NAME}_VERSION_STRING)
+ set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION_STRING}")
+ endif()
+ endif()
+
+ # Check the following variables:
+ # FOO_LIBRARIES
+ # Foo_LIBRARIES
+ # FOO_LIBS
+ # Foo_LIBS
+ set(libs)
+ if(DEFINED ${_packageName}_LIBRARIES)
+ set(libs ${_packageName}_LIBRARIES)
+ elseif(DEFINED ${PACKAGE_NAME}_LIBRARIES)
+ set(libs ${PACKAGE_NAME}_LIBRARIES)
+ elseif(DEFINED ${_packageName}_LIBS)
+ set(libs ${_packageName}_LIBS)
+ elseif(DEFINED ${PACKAGE_NAME}_LIBS)
+ set(libs ${PACKAGE_NAME}_LIBS)
+ endif()
+
+ # Check the following variables:
+ # FOO_INCLUDE_DIRS
+ # Foo_INCLUDE_DIRS
+ # FOO_INCLUDES
+ # Foo_INCLUDES
+ # FOO_INCLUDE_DIR
+ # Foo_INCLUDE_DIR
+ set(includes)
+ if(DEFINED ${_packageName}_INCLUDE_DIRS)
+ set(includes ${_packageName}_INCLUDE_DIRS)
+ elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIRS)
+ set(includes ${PACKAGE_NAME}_INCLUDE_DIRS)
+ elseif(DEFINED ${_packageName}_INCLUDES)
+ set(includes ${_packageName}_INCLUDES)
+ elseif(DEFINED ${PACKAGE_NAME}_INCLUDES)
+ set(includes ${PACKAGE_NAME}_INCLUDES)
+ elseif(DEFINED ${_packageName}_INCLUDE_DIR)
+ set(includes ${_packageName}_INCLUDE_DIR)
+ elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIR)
+ set(includes ${PACKAGE_NAME}_INCLUDE_DIR)
+ endif()
+
+ # Check the following variables:
+ # FOO_DEFINITIONS
+ # Foo_DEFINITIONS
+ set(definitions)
+ if(DEFINED ${_packageName}_DEFINITIONS)
+ set(definitions ${_packageName}_DEFINITIONS)
+ elseif(DEFINED ${PACKAGE_NAME}_DEFINITIONS)
+ set(definitions ${PACKAGE_NAME}_DEFINITIONS)
+ endif()
+
+ set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+ set(PACKAGE_DEFINITIONS "${${definitions}}")
+ set(PACKAGE_LIBRARIES "${${libs}}")
+endif()
+'''
+
+file_2_data_CMakeListsLLVM_txt = '''\
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION} )
+
+set(PACKAGE_FOUND FALSE)
+
+while(TRUE)
+ find_package(LLVM REQUIRED CONFIG QUIET)
+
+ # ARCHS has to be set via the CMD interface
+ if(LLVM_FOUND OR "${ARCHS}" STREQUAL "")
+ break()
+ endif()
+
+ list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+ list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(LLVM_FOUND)
+ set(PACKAGE_FOUND TRUE)
+
+ foreach(mod IN LISTS LLVM_MESON_MODULES)
+ # Reset variables
+ set(out_mods)
+ set(real_mods)
+
+ # Generate a lower and upper case version
+ string(TOLOWER "${mod}" mod_L)
+ string(TOUPPER "${mod}" mod_U)
+
+ # Get the mapped components
+ llvm_map_components_to_libnames(out_mods ${mod} ${mod_L} ${mod_U})
+ list(SORT out_mods)
+ list(REMOVE_DUPLICATES out_mods)
+
+ # Make sure that the modules exist
+ foreach(i IN LISTS out_mods)
+ if(TARGET ${i})
+ list(APPEND real_mods ${i})
+ endif()
+ endforeach()
+
+ # Set the output variables
+ set(MESON_LLVM_TARGETS_${mod} ${real_mods})
+ foreach(i IN LISTS real_mods)
+ set(MESON_TARGET_TO_LLVM_${i} ${mod})
+ endforeach()
+ endforeach()
+
+ # Check the following variables:
+ # LLVM_PACKAGE_VERSION
+ # LLVM_VERSION
+ # LLVM_VERSION_STRING
+ if(NOT DEFINED PACKAGE_VERSION)
+ if(DEFINED LLVM_PACKAGE_VERSION)
+ set(PACKAGE_VERSION "${LLVM_PACKAGE_VERSION}")
+ elseif(DEFINED LLVM_VERSION)
+ set(PACKAGE_VERSION "${LLVM_VERSION}")
+ elseif(DEFINED LLVM_VERSION_STRING)
+ set(PACKAGE_VERSION "${LLVM_VERSION_STRING}")
+ endif()
+ endif()
+
+ # Check the following variables:
+ # LLVM_LIBRARIES
+ # LLVM_LIBS
+ set(libs)
+ if(DEFINED LLVM_LIBRARIES)
+ set(libs LLVM_LIBRARIES)
+ elseif(DEFINED LLVM_LIBS)
+ set(libs LLVM_LIBS)
+ endif()
+
+ # Check the following variables:
+ # LLVM_INCLUDE_DIRS
+ # LLVM_INCLUDES
+ # LLVM_INCLUDE_DIR
+ set(includes)
+ if(DEFINED LLVM_INCLUDE_DIRS)
+ set(includes LLVM_INCLUDE_DIRS)
+ elseif(DEFINED LLVM_INCLUDES)
+ set(includes LLVM_INCLUDES)
+ elseif(DEFINED LLVM_INCLUDE_DIR)
+ set(includes LLVM_INCLUDE_DIR)
+ endif()
+
+ # Check the following variables:
+ # LLVM_DEFINITIONS
+ set(definitions)
+ if(DEFINED LLVM_DEFINITIONS)
+ set(definitions LLVM_DEFINITIONS)
+ endif()
+
+ set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+ set(PACKAGE_DEFINITIONS "${${definitions}}")
+ set(PACKAGE_LIBRARIES "${${libs}}")
+endif()
+'''
+
+file_3_data_CMakePathInfo_txt = '''\
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION})
+
+set(TMP_PATHS_LIST)
+list(APPEND TMP_PATHS_LIST ${CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH})
+
+set(LIB_ARCH_LIST)
+if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
+ file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
+ foreach(dir ${implicit_dirs})
+ if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
+ list(APPEND LIB_ARCH_LIST "${dir}")
+ endif()
+ endforeach()
+endif()
+
+# "Export" these variables:
+set(MESON_ARCH_LIST ${LIB_ARCH_LIST})
+set(MESON_PATHS_LIST ${TMP_PATHS_LIST})
+set(MESON_CMAKE_ROOT ${CMAKE_ROOT})
+set(MESON_CMAKE_SYSROOT ${CMAKE_SYSROOT})
+set(MESON_FIND_ROOT_PATH ${CMAKE_FIND_ROOT_PATH})
+
+message(STATUS ${TMP_PATHS_LIST})
+'''
+
+
+####################
+# END Data section #
+####################
+
+class DataFile:
+ def __init__(self, path: Path, sha256sum: str, data: str) -> None:
+ self.path = path
+ self.sha256sum = sha256sum
+ self.data = data
+
+ def write_once(self, path: Path) -> None:
+ if not path.exists():
+ path.write_text(self.data, encoding='utf-8')
+
+ def write_to_private(self, env: 'Environment') -> Path:
+ out_file = Path(env.scratch_dir) / 'data' / self.path.name
+ out_file.parent.mkdir(exist_ok=True)
+ self.write_once(out_file)
+ return out_file
+
+
+mesondata = {
+ 'cmake/data/preload.cmake': DataFile(
+ Path('cmake/data/preload.cmake'),
+ 'ce8f30159aab25b92c26c58a219a427d47838bfa0739475221d6c8993b4946e5',
+ file_0_data_preload_cmake,
+ ),
+ 'dependencies/data/CMakeLists.txt': DataFile(
+ Path('dependencies/data/CMakeLists.txt'),
+ '4dca24afa13e9311f0598a6ac29690490819bd7d82cfdaa0a2fe5eea3c0fa0d5',
+ file_1_data_CMakeLists_txt,
+ ),
+ 'dependencies/data/CMakeListsLLVM.txt': DataFile(
+ Path('dependencies/data/CMakeListsLLVM.txt'),
+ '412cec3315597041a978d018cdaca282dcd47693793540da88ae2f80d0cbd7cd',
+ file_2_data_CMakeListsLLVM_txt,
+ ),
+ 'dependencies/data/CMakePathInfo.txt': DataFile(
+ Path('dependencies/data/CMakePathInfo.txt'),
+ '90da8b443982d9c87139b7dc84228eb58cab4315764949637208f25e2bda7db2',
+ file_3_data_CMakePathInfo_txt,
+ ),
+}
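+
+# Illustrative usage sketch (`env` is a hypothetical configured Environment):
+# the file is materialized once under <scratch_dir>/data and the cached copy
+# is left untouched on later calls.
+#
+#   df = mesondata['cmake/data/preload.cmake']
+#   out = df.write_to_private(env)   # Path to .../data/preload.cmake
+#   df.write_to_private(env)         # second call reuses the existing file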
diff --git a/meson/mesonbuild/mesonlib/__init__.py b/meson/mesonbuild/mesonlib/__init__.py
new file mode 100644
index 000000000..5b646b549
--- /dev/null
+++ b/meson/mesonbuild/mesonlib/__init__.py
@@ -0,0 +1,30 @@
+# SPDX-license-identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper functions and classes."""
+
+import os
+
+from .universal import *
+
+# Here we import either the posix implementations, the windows implementations,
+# or a generic no-op implementation
+if os.name == 'posix':
+ from .posix import *
+elif os.name == 'nt':
+ from .win32 import *
+else:
+ from .platform import *
diff --git a/meson/mesonbuild/mesonlib/platform.py b/meson/mesonbuild/mesonlib/platform.py
new file mode 100644
index 000000000..cdd42b102
--- /dev/null
+++ b/meson/mesonbuild/mesonlib/platform.py
@@ -0,0 +1,37 @@
+# SPDX-license-identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""base classes providing no-op functionality.."""
+
+import os
+import typing as T
+
+from .. import mlog
+
+__all__ = ['BuildDirLock']
+
+# This needs to be inherited by the specific implementations to make type
+# checking happy
+class BuildDirLock:
+
+ def __init__(self, builddir: str) -> None:
+ self.lockfilename = os.path.join(builddir, 'meson-private/meson.lock')
+
+ def __enter__(self) -> None:
+ mlog.debug('Calling the no-op version of BuildDirLock')
+
+ def __exit__(self, *args: T.Any) -> None:
+ pass
diff --git a/meson/mesonbuild/mesonlib/posix.py b/meson/mesonbuild/mesonlib/posix.py
new file mode 100644
index 000000000..67f9a442b
--- /dev/null
+++ b/meson/mesonbuild/mesonlib/posix.py
@@ -0,0 +1,39 @@
+# SPDX-license-identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Posix specific implementations of mesonlib functionality."""
+
+import fcntl
+import typing as T
+
+from .universal import MesonException
+from .platform import BuildDirLock as BuildDirLockBase
+
+__all__ = ['BuildDirLock']
+
+class BuildDirLock(BuildDirLockBase):
+
+ def __enter__(self) -> None:
+ self.lockfile = open(self.lockfilename, 'w', encoding='utf-8')
+ try:
+ fcntl.flock(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ except (BlockingIOError, PermissionError):
+ self.lockfile.close()
+ raise MesonException('Some other Meson process is already using this build directory. Exiting.')
+
+ def __exit__(self, *args: T.Any) -> None:
+ fcntl.flock(self.lockfile, fcntl.LOCK_UN)
+ self.lockfile.close()
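+
+# Illustrative usage sketch: the lock guards a build directory for the
+# duration of a `with` block and raises MesonException if another Meson
+# process already holds the directory:
+#
+#   with BuildDirLock('/path/to/builddir'):
+#       ...  # safe to mutate the build directory here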
diff --git a/meson/mesonbuild/mesonlib/universal.py b/meson/mesonbuild/mesonlib/universal.py
new file mode 100644
index 000000000..d670d04c7
--- /dev/null
+++ b/meson/mesonbuild/mesonlib/universal.py
@@ -0,0 +1,2190 @@
+# Copyright 2012-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A library of random helper functionality."""
+from pathlib import Path
+import argparse
+import enum
+import sys
+import stat
+import time
+import abc
+import platform, subprocess, operator, os, shlex, shutil, re
+import collections
+from functools import lru_cache, wraps, total_ordering
+from itertools import tee, filterfalse
+from tempfile import TemporaryDirectory
+import typing as T
+import uuid
+import textwrap
+
+from mesonbuild import mlog
+
+if T.TYPE_CHECKING:
+ from .._typing import ImmutableListProtocol
+ from ..build import ConfigurationData
+ from ..coredata import KeyedOptionDictType, UserOption
+ from ..compilers.compilers import CompilerType
+ from ..interpreterbase import ObjectHolder
+
+FileOrString = T.Union['File', str]
+
+_T = T.TypeVar('_T')
+_U = T.TypeVar('_U')
+
+__all__ = [
+ 'GIT',
+ 'an_unpicklable_object',
+ 'python_command',
+ 'project_meson_versions',
+ 'HoldableObject',
+ 'SecondLevelHolder',
+ 'File',
+ 'FileMode',
+ 'GitException',
+ 'LibType',
+ 'MachineChoice',
+ 'MesonException',
+ 'MesonBugException',
+ 'EnvironmentException',
+ 'FileOrString',
+ 'OptionKey',
+ 'dump_conf_header',
+ 'OptionOverrideProxy',
+ 'OptionProxy',
+ 'OptionType',
+ 'OrderedSet',
+ 'PerMachine',
+ 'PerMachineDefaultable',
+ 'PerThreeMachine',
+ 'PerThreeMachineDefaultable',
+ 'ProgressBar',
+ 'RealPathAction',
+ 'TemporaryDirectoryWinProof',
+ 'Version',
+ 'check_direntry_issues',
+ 'classify_unity_sources',
+ 'current_vs_supports_modules',
+ 'darwin_get_object_archs',
+ 'default_libdir',
+ 'default_libexecdir',
+ 'default_prefix',
+ 'detect_subprojects',
+ 'detect_vcs',
+ 'do_conf_file',
+ 'do_conf_str',
+ 'do_define',
+ 'do_replacement',
+ 'exe_exists',
+ 'expand_arguments',
+ 'extract_as_list',
+ 'get_compiler_for_source',
+ 'get_filenames_templates_dict',
+ 'get_library_dirs',
+ 'get_variable_regex',
+ 'get_wine_shortpath',
+ 'git',
+ 'has_path_sep',
+ 'is_aix',
+ 'is_android',
+ 'is_ascii_string',
+ 'is_cygwin',
+ 'is_debianlike',
+ 'is_dragonflybsd',
+ 'is_freebsd',
+ 'is_haiku',
+ 'is_hurd',
+ 'is_irix',
+ 'is_linux',
+ 'is_netbsd',
+ 'is_openbsd',
+ 'is_osx',
+ 'is_qnx',
+ 'is_sunos',
+ 'is_windows',
+ 'is_wsl',
+ 'iter_regexin_iter',
+ 'join_args',
+ 'listify',
+ 'partition',
+ 'path_is_in_root',
+ 'Popen_safe',
+ 'quiet_git',
+ 'quote_arg',
+ 'relative_to_if_possible',
+ 'relpath',
+ 'replace_if_different',
+ 'run_once',
+ 'get_meson_command',
+ 'set_meson_command',
+ 'split_args',
+ 'stringlistify',
+ 'substitute_values',
+ 'substring_is_in_list',
+ 'typeslistify',
+ 'verbose_git',
+ 'version_compare',
+ 'version_compare_condition_with_min',
+ 'version_compare_many',
+ 'search_version',
+ 'windows_proof_rm',
+ 'windows_proof_rmtree',
+]
+
+
+# TODO: this is such a hack, this really should be either in coredata or in the
+# interpreter
+# {subproject: project_meson_version}
+project_meson_versions = collections.defaultdict(str) # type: T.DefaultDict[str, str]
+
+
+from glob import glob
+
+if os.path.basename(sys.executable) == 'meson.exe':
+ # In Windows and using the MSI installed executable.
+ python_command = [sys.executable, 'runpython']
+else:
+ python_command = [sys.executable]
+_meson_command = None
+
+class MesonException(Exception):
+ '''Exceptions thrown by Meson'''
+
+ def __init__(self, *args: object, file: T.Optional[str] = None,
+ lineno: T.Optional[int] = None, colno: T.Optional[int] = None):
+ super().__init__(*args)
+ self.file = file
+ self.lineno = lineno
+ self.colno = colno
+
+
+class MesonBugException(MesonException):
+ '''Exceptions thrown when there is a clear Meson bug that should be reported'''
+
+ def __init__(self, msg: str, file: T.Optional[str] = None,
+ lineno: T.Optional[int] = None, colno: T.Optional[int] = None):
+ super().__init__(msg + '\n\n This is a Meson bug and should be reported!',
+ file=file, lineno=lineno, colno=colno)
+
+class EnvironmentException(MesonException):
+ '''Exceptions thrown while processing and creating the build environment'''
+
+class GitException(MesonException):
+ def __init__(self, msg: str, output: T.Optional[str] = None):
+ super().__init__(msg)
+ self.output = output.strip() if output else ''
+
+GIT = shutil.which('git')
+def git(cmd: T.List[str], workingdir: str, check: bool = False, **kwargs: T.Any) -> T.Tuple[subprocess.Popen, str, str]:
+ cmd = [GIT] + cmd
+ p, o, e = Popen_safe(cmd, cwd=workingdir, **kwargs)
+ if check and p.returncode != 0:
+ raise GitException('Git command failed: ' + str(cmd), e)
+ return p, o, e
+
+def quiet_git(cmd: T.List[str], workingdir: str, check: bool = False) -> T.Tuple[bool, str]:
+ if not GIT:
+ m = 'Git program not found.'
+ if check:
+ raise GitException(m)
+ return False, m
+ p, o, e = git(cmd, workingdir, check)
+ if p.returncode != 0:
+ return False, e
+ return True, o
+
+def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool:
+ if not GIT:
+ m = 'Git program not found.'
+ if check:
+ raise GitException(m)
+ return False
+ p, _, _ = git(cmd, workingdir, check, stdout=None, stderr=None)
+ return p.returncode == 0
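+
+# Illustrative sketch: ok, out = quiet_git(['rev-parse', 'HEAD'], repo_dir)
+# yields (True, '<sha>\n') on success and (False, <stderr or message>) on
+# failure; verbose_git() leaves stdout/stderr attached and returns a bool.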
+
+def set_meson_command(mainfile: str) -> None:
+ global python_command
+ global _meson_command
+ # On UNIX-like systems `meson` is a Python script
+ # On Windows `meson` and `meson.exe` are wrapper exes
+ if not mainfile.endswith('.py'):
+ _meson_command = [mainfile]
+ elif os.path.isabs(mainfile) and mainfile.endswith('mesonmain.py'):
+ # Can't actually run meson with an absolute path to mesonmain.py, it must be run as -m mesonbuild.mesonmain
+ _meson_command = python_command + ['-m', 'mesonbuild.mesonmain']
+ else:
+ # Either run uninstalled, or full path to meson-script.py
+ _meson_command = python_command + [mainfile]
+ # We print this value for unit tests.
+ if 'MESON_COMMAND_TESTS' in os.environ:
+ mlog.log(f'meson_command is {_meson_command!r}')
+
+
+def get_meson_command() -> T.Optional[T.List[str]]:
+ return _meson_command
+
+
+def is_ascii_string(astring: T.Union[str, bytes]) -> bool:
+ try:
+ if isinstance(astring, str):
+ astring.encode('ascii')
+ elif isinstance(astring, bytes):
+ astring.decode('ascii')
+ except (UnicodeEncodeError, UnicodeDecodeError):
+ return False
+ return True
+
+
+def check_direntry_issues(direntry_array: T.Union[T.List[T.Union[str, bytes]], str, bytes]) -> None:
+ import locale
+ # Warn if the locale is not UTF-8. This can cause various unfixable issues
+ # such as os.stat not being able to decode filenames with unicode in them.
+ # There is no way to reset both the preferred encoding and the filesystem
+ # encoding, so we can just warn about it.
+ e = locale.getpreferredencoding()
+ if e.upper() != 'UTF-8' and not is_windows():
+ if not isinstance(direntry_array, list):
+ direntry_array = [direntry_array]
+ for de in direntry_array:
+ if is_ascii_string(de):
+ continue
+ mlog.warning(textwrap.dedent(f'''
+ You are using {e!r} which is not a Unicode-compatible
+ locale but you are trying to access a file system entry called {de!r} which is
+ not pure ASCII. This may cause problems.
+ '''), file=sys.stderr)
+
+
+# Put this in objects that should not get dumped to pickle files
+# by accident.
+import threading
+an_unpicklable_object = threading.Lock()
+
+class HoldableObject(metaclass=abc.ABCMeta):
+ ''' Dummy base class for all objects that can be
+ held by an interpreter.baseobjects.ObjectHolder '''
+
+class SecondLevelHolder(HoldableObject, metaclass=abc.ABCMeta):
+ ''' A second level object holder. The primary purpose
+ of such objects is to hold multiple objects with one
+ default option. '''
+
+ @abc.abstractmethod
+ def get_default_object(self) -> HoldableObject: ...
+
+class FileMode:
+ # The first triad is for owner permissions, the second for group permissions,
+ # and the third for others (everyone else).
+ # For the 1st character:
+ # 'r' means can read
+ # '-' means not allowed
+ # For the 2nd character:
+ # 'w' means can write
+ # '-' means not allowed
+ # For the 3rd character:
+ # 'x' means can execute
+ # 's' means can execute and setuid/setgid is set (owner/group triads only)
+ # 'S' means cannot execute and setuid/setgid is set (owner/group triads only)
+ # 't' means can execute and sticky bit is set ("others" triads only)
+ # 'T' means cannot execute and sticky bit is set ("others" triads only)
+ # '-' means none of these are allowed
+ #
+ # The meanings of 'rwx' perms is not obvious for directories; see:
+ # https://www.hackinglinuxexposed.com/articles/20030424.html
+ #
+ # For information on this notation such as setuid/setgid/sticky bits, see:
+ # https://en.wikipedia.org/wiki/File_system_permissions#Symbolic_notation
+ symbolic_perms_regex = re.compile('[r-][w-][xsS-]' # Owner perms
+ '[r-][w-][xsS-]' # Group perms
+ '[r-][w-][xtT-]') # Others perms
+
+ def __init__(self, perms: T.Optional[str] = None, owner: T.Union[str, int, None] = None,
+ group: T.Union[str, int, None] = None):
+ self.perms_s = perms
+ self.perms = self.perms_s_to_bits(perms)
+ self.owner = owner
+ self.group = group
+
+ def __repr__(self) -> str:
+ ret = '<FileMode: {!r} owner={} group={}>'
+ return ret.format(self.perms_s, self.owner, self.group)
+
+ @classmethod
+ def perms_s_to_bits(cls, perms_s: T.Optional[str]) -> int:
+ '''
+ Does the opposite of stat.filemode(), converts strings of the form
+ 'rwxr-xr-x' to st_mode enums which can be passed to os.chmod()
+ '''
+ if perms_s is None:
+ # No perms specified, we will not touch the permissions
+ return -1
+ eg = 'rwxr-xr-x'
+ if not isinstance(perms_s, str):
+ raise MesonException(f'Install perms must be a string. For example, {eg!r}')
+ if len(perms_s) != 9 or not cls.symbolic_perms_regex.match(perms_s):
+ raise MesonException(f'File perms {perms_s!r} must be exactly 9 chars in the form {eg!r}')
+ perms = 0
+ # Owner perms
+ if perms_s[0] == 'r':
+ perms |= stat.S_IRUSR
+ if perms_s[1] == 'w':
+ perms |= stat.S_IWUSR
+ if perms_s[2] == 'x':
+ perms |= stat.S_IXUSR
+ elif perms_s[2] == 'S':
+ perms |= stat.S_ISUID
+ elif perms_s[2] == 's':
+ perms |= stat.S_IXUSR
+ perms |= stat.S_ISUID
+ # Group perms
+ if perms_s[3] == 'r':
+ perms |= stat.S_IRGRP
+ if perms_s[4] == 'w':
+ perms |= stat.S_IWGRP
+ if perms_s[5] == 'x':
+ perms |= stat.S_IXGRP
+ elif perms_s[5] == 'S':
+ perms |= stat.S_ISGID
+ elif perms_s[5] == 's':
+ perms |= stat.S_IXGRP
+ perms |= stat.S_ISGID
+ # Others perms
+ if perms_s[6] == 'r':
+ perms |= stat.S_IROTH
+ if perms_s[7] == 'w':
+ perms |= stat.S_IWOTH
+ if perms_s[8] == 'x':
+ perms |= stat.S_IXOTH
+ elif perms_s[8] == 'T':
+ perms |= stat.S_ISVTX
+ elif perms_s[8] == 't':
+ perms |= stat.S_IXOTH
+ perms |= stat.S_ISVTX
+ return perms
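+
+ # Illustrative sketch: perms_s_to_bits() is the inverse of stat.filemode()
+ # minus the leading file-type character, e.g.
+ #   FileMode.perms_s_to_bits('rw-r--r--') == 0o644
+ #   stat.filemode(0o644)[1:] == 'rw-r--r--'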
+
+dot_C_dot_H_warning = """You are using .C or .H files in your project. This is deprecated.
+ Currently, Meson treats these files as C++ code, but they
+ used to be treated as C code.
+ Note that the situation is a bit more complex if you are using the
+ Visual Studio compiler, as it treats .C files as C code, unless you add
+ the /TP compiler flag, but this is unreliable.
+ See https://github.com/mesonbuild/meson/pull/8747 for the discussions."""
+class File(HoldableObject):
+ def __init__(self, is_built: bool, subdir: str, fname: str):
+ if fname.endswith(".C") or fname.endswith(".H"):
+ mlog.warning(dot_C_dot_H_warning, once=True)
+ self.is_built = is_built
+ self.subdir = subdir
+ self.fname = fname
+ self.hash = hash((is_built, subdir, fname))
+
+ def __str__(self) -> str:
+ return self.relative_name()
+
+ def __repr__(self) -> str:
+ ret = '<File: {0}'
+ if not self.is_built:
+ ret += ' (not built)'
+ ret += '>'
+ return ret.format(self.relative_name())
+
+ @staticmethod
+ @lru_cache(maxsize=None)
+ def from_source_file(source_root: str, subdir: str, fname: str) -> 'File':
+ if not os.path.isfile(os.path.join(source_root, subdir, fname)):
+ raise MesonException('File %s does not exist.' % fname)
+ return File(False, subdir, fname)
+
+ @staticmethod
+ def from_built_file(subdir: str, fname: str) -> 'File':
+ return File(True, subdir, fname)
+
+ @staticmethod
+ def from_absolute_file(fname: str) -> 'File':
+ return File(False, '', fname)
+
+ @lru_cache(maxsize=None)
+ def rel_to_builddir(self, build_to_src: str) -> str:
+ if self.is_built:
+ return self.relative_name()
+ else:
+ return os.path.join(build_to_src, self.subdir, self.fname)
+
+ @lru_cache(maxsize=None)
+ def absolute_path(self, srcdir: str, builddir: str) -> str:
+ absdir = srcdir
+ if self.is_built:
+ absdir = builddir
+ return os.path.join(absdir, self.relative_name())
+
+ def endswith(self, ending: str) -> bool:
+ return self.fname.endswith(ending)
+
+ def split(self, s: str, maxsplit: int = -1) -> T.List[str]:
+ return self.fname.split(s, maxsplit=maxsplit)
+
+ def rsplit(self, s: str, maxsplit: int = -1) -> T.List[str]:
+ return self.fname.rsplit(s, maxsplit=maxsplit)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, File):
+ return NotImplemented
+ if self.hash != other.hash:
+ return False
+ return (self.fname, self.subdir, self.is_built) == (other.fname, other.subdir, other.is_built)
+
+ def __hash__(self) -> int:
+ return self.hash
+
+ @lru_cache(maxsize=None)
+ def relative_name(self) -> str:
+ return os.path.join(self.subdir, self.fname)
+
+
+def get_compiler_for_source(compilers: T.Iterable['CompilerType'], src: str) -> 'CompilerType':
+ """Given a set of compilers and a source, find the compiler for that source type."""
+ for comp in compilers:
+ if comp.can_compile(src):
+ return comp
+ raise MesonException(f'No specified compiler can handle file {src!s}')
+
+
+def classify_unity_sources(compilers: T.Iterable['CompilerType'], sources: T.Iterable[str]) -> T.Dict['CompilerType', T.List[str]]:
+ compsrclist = {} # type: T.Dict[CompilerType, T.List[str]]
+ for src in sources:
+ comp = get_compiler_for_source(compilers, src)
+ if comp not in compsrclist:
+ compsrclist[comp] = [src]
+ else:
+ compsrclist[comp].append(src)
+ return compsrclist
+
+
+class MachineChoice(enum.IntEnum):
+
+ """Enum class representing one of the two abstract machine names used in
+ most places: the build, and host, machines.
+ """
+
+ BUILD = 0
+ HOST = 1
+
+ def get_lower_case_name(self) -> str:
+ return PerMachine('build', 'host')[self]
+
+ def get_prefix(self) -> str:
+ return PerMachine('build.', '')[self]
+
+
+class PerMachine(T.Generic[_T]):
+ def __init__(self, build: _T, host: _T) -> None:
+ self.build = build
+ self.host = host
+
+ def __getitem__(self, machine: MachineChoice) -> _T:
+ return {
+ MachineChoice.BUILD: self.build,
+ MachineChoice.HOST: self.host,
+ }[machine]
+
+ def __setitem__(self, machine: MachineChoice, val: _T) -> None:
+ setattr(self, machine.get_lower_case_name(), val)
+
+ def miss_defaulting(self) -> "PerMachineDefaultable[T.Optional[_T]]":
+ """Unset definition duplicated from their previous to None
+
+ This is the inverse of ''default_missing''. By removing defaulted
+ machines, we can elaborate the original and then redefault them and thus
+ avoid repeating the elaboration explicitly.
+ """
+ unfreeze = PerMachineDefaultable() # type: PerMachineDefaultable[T.Optional[_T]]
+ unfreeze.build = self.build
+ unfreeze.host = self.host
+ if unfreeze.host == unfreeze.build:
+ unfreeze.host = None
+ return unfreeze
+
+ def __repr__(self) -> str:
+ return f'PerMachine({self.build!r}, {self.host!r})'
+
+
+class PerThreeMachine(PerMachine[_T]):
+ """Like `PerMachine` but includes `target` too.
+
+ It turns out we need to track the target machine for just one thing.
+ There's no need to compute the `target` field so we don't bother
+ overriding the `__getitem__`/`__setitem__` methods.
+ """
+ def __init__(self, build: _T, host: _T, target: _T) -> None:
+ super().__init__(build, host)
+ self.target = target
+
+ def miss_defaulting(self) -> "PerThreeMachineDefaultable[T.Optional[_T]]":
+ """Unset definition duplicated from their previous to None
+
+ This is the inverse of ''default_missing''. By removing defaulted
+ machines, we can elaborate the original and then redefault them and thus
+ avoid repeating the elaboration explicitly.
+ """
+ unfreeze = PerThreeMachineDefaultable() # type: PerThreeMachineDefaultable[T.Optional[_T]]
+ unfreeze.build = self.build
+ unfreeze.host = self.host
+ unfreeze.target = self.target
+ if unfreeze.target == unfreeze.host:
+ unfreeze.target = None
+ if unfreeze.host == unfreeze.build:
+ unfreeze.host = None
+ return unfreeze
+
+ def matches_build_machine(self, machine: MachineChoice) -> bool:
+ return self.build == self[machine]
+
+ def __repr__(self) -> str:
+ return f'PerThreeMachine({self.build!r}, {self.host!r}, {self.target!r})'
+
+
+class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
+ """Extends `PerMachine` with the ability to default from `None`s.
+ """
+ def __init__(self, build: T.Optional[_T] = None, host: T.Optional[_T] = None) -> None:
+ super().__init__(build, host)
+
+ def default_missing(self) -> "PerMachine[_T]":
+ """Default host to build
+
+ This allows just specifying nothing in the native case, and just host in the
+ cross non-compiler case.
+ """
+ freeze = PerMachine(self.build, self.host)
+ if freeze.host is None:
+ freeze.host = freeze.build
+ return freeze
+
+ def __repr__(self) -> str:
+ return f'PerMachineDefaultable({self.build!r}, {self.host!r})'
+
+ @classmethod
+ def default(cls, is_cross: bool, build: _T, host: _T) -> PerMachine[_T]:
+ """Easy way to get a defaulted value
+
+ This allows simplifying the case where you can control whether host and
+ build are separate or not with a boolean. If the is_cross value is set
+ to true then the optional host value will be used, otherwise the host
+ will be set to the build value.
+ """
+ m = cls(build)
+ if is_cross:
+ m.host = host
+ return m.default_missing()
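+
+ # Illustrative sketch: in a native build the host value defaults to the
+ # build value, while a cross build keeps the two distinct:
+ #   PerMachineDefaultable.default(False, 'gcc', 'clang') -> PerMachine('gcc', 'gcc')
+ #   PerMachineDefaultable.default(True, 'gcc', 'clang') -> PerMachine('gcc', 'clang')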
+
+
+
+class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Optional[_T]]):
+ """Extends `PerThreeMachine` with the ability to default from `None`s.
+ """
+ def __init__(self) -> None:
+ PerThreeMachine.__init__(self, None, None, None)
+
+ def default_missing(self) -> "PerThreeMachine[T.Optional[_T]]":
+ """Default host to build and target to host.
+
+ This allows just specifying nothing in the native case, just host in the
+ cross non-compiler case, and just target in the native-built
+ cross-compiler case.
+ """
+ freeze = PerThreeMachine(self.build, self.host, self.target)
+ if freeze.host is None:
+ freeze.host = freeze.build
+ if freeze.target is None:
+ freeze.target = freeze.host
+ return freeze
+
+ def __repr__(self) -> str:
+ return f'PerThreeMachineDefaultable({self.build!r}, {self.host!r}, {self.target!r})'
+
+
+def is_sunos() -> bool:
+ return platform.system().lower() == 'sunos'
+
+
+def is_osx() -> bool:
+ return platform.system().lower() == 'darwin'
+
+
+def is_linux() -> bool:
+ return platform.system().lower() == 'linux'
+
+
+def is_android() -> bool:
+ return platform.system().lower() == 'android'
+
+
+def is_haiku() -> bool:
+ return platform.system().lower() == 'haiku'
+
+
+def is_openbsd() -> bool:
+ return platform.system().lower() == 'openbsd'
+
+
+def is_windows() -> bool:
+ platname = platform.system().lower()
+ return platname == 'windows'
+
+def is_wsl() -> bool:
+ return is_linux() and 'microsoft' in platform.release().lower()
+
+def is_cygwin() -> bool:
+ return sys.platform == 'cygwin'
+
+
+def is_debianlike() -> bool:
+ return os.path.isfile('/etc/debian_version')
+
+
+def is_dragonflybsd() -> bool:
+ return platform.system().lower() == 'dragonfly'
+
+
+def is_netbsd() -> bool:
+ return platform.system().lower() == 'netbsd'
+
+
+def is_freebsd() -> bool:
+ return platform.system().lower() == 'freebsd'
+
+def is_irix() -> bool:
+ return platform.system().startswith('irix')
+
+def is_hurd() -> bool:
+ return platform.system().lower() == 'gnu'
+
+def is_qnx() -> bool:
+ return platform.system().lower() == 'qnx'
+
+def is_aix() -> bool:
+ return platform.system().lower() == 'aix'
+
+def exe_exists(arglist: T.List[str]) -> bool:
+ try:
+ if subprocess.run(arglist, timeout=10).returncode == 0:
+ return True
+ except (FileNotFoundError, subprocess.TimeoutExpired):
+ pass
+ return False
+
+
+@lru_cache(maxsize=None)
+def darwin_get_object_archs(objpath: str) -> 'T.Optional[ImmutableListProtocol[str]]':
+ '''
+ For a specific object (executable, static library, dylib, etc), run `lipo`
+ to fetch the list of archs supported by it. Supports both thin objects and
+ 'fat' objects.
+ '''
+ _, stdo, stderr = Popen_safe(['lipo', '-info', objpath])
+ if not stdo:
+ mlog.debug(f'lipo {objpath}: {stderr}')
+ return None
+ stdo = stdo.rsplit(': ', 1)[1]
+ # Convert from lipo-style archs to meson-style CPUs
+ stdo = stdo.replace('i386', 'x86')
+ stdo = stdo.replace('arm64', 'aarch64')
+ # Add generic name for armv7 and armv7s
+ if 'armv7' in stdo:
+ stdo += ' arm'
+ return stdo.split()
+
+
+def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[T.Dict[str, str]]:
+ vcs_systems = [
+ dict(name = 'git', cmd = 'git', repo_dir = '.git', get_rev = 'git describe --dirty=+', rev_regex = '(.*)', dep = '.git/logs/HEAD'),
+ dict(name = 'mercurial', cmd = 'hg', repo_dir = '.hg', get_rev = 'hg id -i', rev_regex = '(.*)', dep = '.hg/dirstate'),
+ dict(name = 'subversion', cmd = 'svn', repo_dir = '.svn', get_rev = 'svn info', rev_regex = 'Revision: (.*)', dep = '.svn/wc.db'),
+ dict(name = 'bazaar', cmd = 'bzr', repo_dir = '.bzr', get_rev = 'bzr revno', rev_regex = '(.*)', dep = '.bzr'),
+ ]
+ if isinstance(source_dir, str):
+ source_dir = Path(source_dir)
+
+ parent_paths_and_self = collections.deque(source_dir.parents)
+ # Prepend the source directory to the front so we can check it;
+ # source_dir.parents doesn't include source_dir
+ parent_paths_and_self.appendleft(source_dir)
+ for curdir in parent_paths_and_self:
+ for vcs in vcs_systems:
+ if Path.is_dir(curdir.joinpath(vcs['repo_dir'])) and shutil.which(vcs['cmd']):
+ vcs['wc_dir'] = str(curdir)
+ return vcs
+ return None
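+
+# Illustrative sketch: inside a git checkout, detect_vcs('.') returns the
+# matching entry above with a 'wc_dir' key added, e.g.
+#   {'name': 'git', 'cmd': 'git', ..., 'wc_dir': '/abs/path/to/checkout'}
+# and returns None when no VCS directory is found.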
+
+def current_vs_supports_modules() -> bool:
+ vsver = os.environ.get('VSCMD_VER', '')
+ nums = vsver.split('.', 2)
+ # VSCMD_VER may be unset or malformed; treat that as "no module support"
+ # instead of raising ValueError from int().
+ if not nums[0].isdigit():
+ return False
+ major = int(nums[0])
+ if major >= 17:
+ return True
+ if major == 16 and len(nums) > 1 and nums[1].isdigit() and int(nums[1]) >= 10:
+ return True
+ return vsver.startswith('16.9.0') and '-pre.' in vsver
+
+# a helper class which implements the same version ordering as RPM
+class Version:
+ def __init__(self, s: str) -> None:
+ self._s = s
+
+ # split into numeric, alphabetic and non-alphanumeric sequences
+ sequences1 = re.finditer(r'(\d+|[a-zA-Z]+|[^a-zA-Z\d]+)', s)
+
+ # non-alphanumeric separators are discarded
+ sequences2 = [m for m in sequences1 if not re.match(r'[^a-zA-Z\d]+', m.group(1))]
+
+ # numeric sequences are converted from strings to ints
+ sequences3 = [int(m.group(1)) if m.group(1).isdigit() else m.group(1) for m in sequences2]
+
+ self._v = sequences3
+
+ def __str__(self) -> str:
+ return '{} (V={})'.format(self._s, str(self._v))
+
+ def __repr__(self) -> str:
+ return f'<Version: {self._s}>'
+
+ def __lt__(self, other: object) -> bool:
+ if isinstance(other, Version):
+ return self.__cmp(other, operator.lt)
+ return NotImplemented
+
+ def __gt__(self, other: object) -> bool:
+ if isinstance(other, Version):
+ return self.__cmp(other, operator.gt)
+ return NotImplemented
+
+ def __le__(self, other: object) -> bool:
+ if isinstance(other, Version):
+ return self.__cmp(other, operator.le)
+ return NotImplemented
+
+ def __ge__(self, other: object) -> bool:
+ if isinstance(other, Version):
+ return self.__cmp(other, operator.ge)
+ return NotImplemented
+
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, Version):
+ return self._v == other._v
+ return NotImplemented
+
+ def __ne__(self, other: object) -> bool:
+ if isinstance(other, Version):
+ return self._v != other._v
+ return NotImplemented
+
+ def __cmp(self, other: 'Version', comparator: T.Callable[[T.Any, T.Any], bool]) -> bool:
+ # compare each sequence in order
+ for ours, theirs in zip(self._v, other._v):
+ # sort a non-digit sequence before a digit sequence
+ ours_is_int = isinstance(ours, int)
+ theirs_is_int = isinstance(theirs, int)
+ if ours_is_int != theirs_is_int:
+ return comparator(ours_is_int, theirs_is_int)
+
+ if ours != theirs:
+ return comparator(ours, theirs)
+
+ # if equal length, all components have matched, so equal
+ # otherwise, the version with a suffix remaining is greater
+ return comparator(len(self._v), len(other._v))
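+
+# Illustrative sketch of the ordering rules implemented above:
+#   Version('1.10') > Version('1.9')     # numeric, not lexicographic: 10 > 9
+#   Version('1.a') < Version('1.1')      # alphabetic sorts before numeric
+#   Version('1.0') < Version('1.0rc1')   # a remaining suffix compares greater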
+
+
+def _version_extract_cmpop(vstr2: str) -> T.Tuple[T.Callable[[T.Any, T.Any], bool], str]:
+ if vstr2.startswith('>='):
+ cmpop = operator.ge
+ vstr2 = vstr2[2:]
+ elif vstr2.startswith('<='):
+ cmpop = operator.le
+ vstr2 = vstr2[2:]
+ elif vstr2.startswith('!='):
+ cmpop = operator.ne
+ vstr2 = vstr2[2:]
+ elif vstr2.startswith('=='):
+ cmpop = operator.eq
+ vstr2 = vstr2[2:]
+ elif vstr2.startswith('='):
+ cmpop = operator.eq
+ vstr2 = vstr2[1:]
+ elif vstr2.startswith('>'):
+ cmpop = operator.gt
+ vstr2 = vstr2[1:]
+ elif vstr2.startswith('<'):
+ cmpop = operator.lt
+ vstr2 = vstr2[1:]
+ else:
+ cmpop = operator.eq
+
+ return (cmpop, vstr2)
+
+
+def version_compare(vstr1: str, vstr2: str) -> bool:
+ (cmpop, vstr2) = _version_extract_cmpop(vstr2)
+ return cmpop(Version(vstr1), Version(vstr2))
+
+
+def version_compare_many(vstr1: str, conditions: T.Union[str, T.Iterable[str]]) -> T.Tuple[bool, T.List[str], T.List[str]]:
+ if isinstance(conditions, str):
+ conditions = [conditions]
+ found = []
+ not_found = []
+ for req in conditions:
+ if not version_compare(vstr1, req):
+ not_found.append(req)
+ else:
+ found.append(req)
+ return not_found == [], not_found, found
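+
+# Illustrative sketch: version_compare('1.2.3', '>=1.2') is True, and
+# version_compare_many reports which conditions failed and which passed:
+#   version_compare_many('1.2.3', ['>=1.0', '<1.2']) -> (False, ['<1.2'], ['>=1.0'])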
+
+
+# determine if the minimum version satisfying the condition |condition| exceeds
+# the minimum version for a feature |minimum|
+def version_compare_condition_with_min(condition: str, minimum: str) -> bool:
+ if condition.startswith('>='):
+ cmpop = operator.le
+ condition = condition[2:]
+ elif condition.startswith('<='):
+ return False
+ elif condition.startswith('!='):
+ return False
+ elif condition.startswith('=='):
+ cmpop = operator.le
+ condition = condition[2:]
+ elif condition.startswith('='):
+ cmpop = operator.le
+ condition = condition[1:]
+ elif condition.startswith('>'):
+ cmpop = operator.lt
+ condition = condition[1:]
+ elif condition.startswith('<'):
+ return False
+ else:
+ cmpop = operator.le
+
+ # Declaring a project(meson_version: '>=0.46') and then using features in
+ # 0.46.0 is valid, because (knowing the meson versioning scheme) '0.46.0' is
+ # the lowest version which satisfies the constraint '>=0.46'.
+ #
+ # But this will fail here, because the minimum version required by the
+ # version constraint ('0.46') is strictly less (in our version comparison)
+ # than the minimum version needed for the feature ('0.46.0').
+ #
+ # Map versions in the constraint of the form '0.46' to '0.46.0', to embed
+ # this knowledge of the meson versioning scheme.
+ condition = condition.strip()
+ if re.match(r'^\d+\.\d+$', condition):
+ condition += '.0'
+
+ return T.cast(bool, cmpop(Version(minimum), Version(condition)))
+
+def search_version(text: str) -> str:
+ # Usually of the type 4.1.4 but compiler output may contain
+ # stuff like this:
+ # (Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease)
+ # Limiting major version number to two digits seems to work
+ # thus far. When we get to GCC 100, this will break, but
+ # if we are still relevant when that happens, it can be
+ # considered an achievement in itself.
+ #
+ # This regex is reaching magic levels. If it ever needs
+ # to be updated, do not complexify but convert to something
+ # saner instead.
+ # We'll demystify it a bit with a verbose definition.
+ version_regex = re.compile(r"""
+ (?<! # Zero-width negative lookbehind assertion
+ (
+ \d # One digit
+ | \. # Or one period
+ ) # One occurrence
+ )
+ # Following pattern must not follow a digit or period
+ (
+ \d{1,2} # One or two digits
+ (
+ \.\d+ # Period and one or more digits
+ )+ # One or more occurrences
+ (
+ -[a-zA-Z0-9]+ # Hyphen and one or more alphanumeric
+ )? # Zero or one occurrence
+ ) # One occurrence
+ """, re.VERBOSE)
+ match = version_regex.search(text)
+ if match:
+ return match.group(0)
+
+ # try a simpler regex that has like "blah 2020.01.100 foo" or "blah 2020.01 foo"
+ version_regex = re.compile(r"(\d{1,4}\.\d{1,4}\.?\d{0,4})")
+ match = version_regex.search(text)
+ if match:
+ return match.group(0)
+
+ return 'unknown version'
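+
+# Illustrative sketch, using the compiler output quoted above:
+#   search_version('(Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease)')
+#       -> '4.8.3'
+#   search_version('no digits here') -> 'unknown version'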
+
+
+def default_libdir() -> str:
+ if is_debianlike():
+ try:
+ pc = subprocess.Popen(['dpkg-architecture', '-qDEB_HOST_MULTIARCH'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL)
+ (stdo, _) = pc.communicate()
+ if pc.returncode == 0:
+ archpath = stdo.decode().strip()
+ return 'lib/' + archpath
+ except Exception:
+ pass
+ if is_freebsd() or is_irix():
+ return 'lib'
+ if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
+ return 'lib64'
+ return 'lib'
+
+
+def default_libexecdir() -> str:
+ # There is no way to auto-detect this, so it must be set at build time
+ return 'libexec'
+
+
+def default_prefix() -> str:
+ return 'c:/' if is_windows() else '/usr/local'
+
+
+def get_library_dirs() -> T.List[str]:
+ if is_windows():
+ return ['C:/mingw/lib'] # TODO: get programmatically
+ if is_osx():
+ return ['/usr/lib'] # TODO: get programmatically
+ # The following is probably Debian/Ubuntu specific.
+ # /usr/local/lib is first because it contains stuff
+ # installed by the sysadmin and is probably more up-to-date
+ # than /usr/lib. If you feel that this search order is
+ # problematic, please raise the issue on the mailing list.
+ unixdirs = ['/usr/local/lib', '/usr/lib', '/lib']
+
+ if is_freebsd():
+ return unixdirs
+ # FIXME: this needs to be further genericized for aarch64 etc.
+ machine = platform.machine()
+ if machine in ('i386', 'i486', 'i586', 'i686'):
+ plat = 'i386'
+ elif machine.startswith('arm'):
+ plat = 'arm'
+ else:
+ plat = ''
+
+ # Solaris puts 32-bit libraries in the main /lib & /usr/lib directories
+ # and 64-bit libraries in platform specific subdirectories.
+ if is_sunos():
+ if machine == 'i86pc':
+ plat = 'amd64'
+ elif machine.startswith('sun4'):
+ plat = 'sparcv9'
+
+ usr_platdir = Path('/usr/lib/') / plat
+ if usr_platdir.is_dir():
+ unixdirs += [str(x) for x in usr_platdir.iterdir() if x.is_dir()]
+ if os.path.exists('/usr/lib64'):
+ unixdirs.append('/usr/lib64')
+
+ lib_platdir = Path('/lib/') / plat
+ if lib_platdir.is_dir():
+ unixdirs += [str(x) for x in lib_platdir.iterdir() if x.is_dir()]
+ if os.path.exists('/lib64'):
+ unixdirs.append('/lib64')
+
+ return unixdirs
+
+
+def has_path_sep(name: str, sep: str = '/\\') -> bool:
+ 'Checks if any of the specified @sep path separators are in @name'
+ for each in sep:
+ if each in name:
+ return True
+ return False
+
+
+if is_windows():
+ # shlex.split is not suitable for splitting command line on Window (https://bugs.python.org/issue1724822);
+ # shlex.quote is similarly problematic. Below are "proper" implementations of these functions according to
+ # https://docs.microsoft.com/en-us/cpp/c-language/parsing-c-command-line-arguments and
+ # https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
+
+ _whitespace = ' \t\n\r'
+ _find_unsafe_char = re.compile(fr'[{_whitespace}"]').search
+
+ def quote_arg(arg: str) -> str:
+ if arg and not _find_unsafe_char(arg):
+ return arg
+
+ result = '"'
+ num_backslashes = 0
+ for c in arg:
+ if c == '\\':
+ num_backslashes += 1
+ else:
+ if c == '"':
+ # Escape all backslashes and the following double quotation mark
+ num_backslashes = num_backslashes * 2 + 1
+
+ result += num_backslashes * '\\' + c
+ num_backslashes = 0
+
+ # Escape all backslashes, but let the terminating double quotation
+ # mark we add below be interpreted as a metacharacter
+ result += (num_backslashes * 2) * '\\' + '"'
+ return result
+
+ def split_args(cmd: str) -> T.List[str]:
+ result = []
+ arg = ''
+ num_backslashes = 0
+ num_quotes = 0
+ in_quotes = False
+ for c in cmd:
+ if c == '\\':
+ num_backslashes += 1
+ else:
+ if c == '"' and not (num_backslashes % 2):
+ # unescaped quote, eat it
+ arg += (num_backslashes // 2) * '\\'
+ num_quotes += 1
+ in_quotes = not in_quotes
+ elif c in _whitespace and not in_quotes:
+ if arg or num_quotes:
+ # reached the end of the argument
+ result.append(arg)
+ arg = ''
+ num_quotes = 0
+ else:
+ if c == '"':
+ # escaped quote
+ num_backslashes = (num_backslashes - 1) // 2
+
+ arg += num_backslashes * '\\' + c
+
+ num_backslashes = 0
+
+ if arg or num_quotes:
+ result.append(arg)
+
+ return result
+else:
+ def quote_arg(arg: str) -> str:
+ return shlex.quote(arg)
+
+ def split_args(cmd: str) -> T.List[str]:
+ return shlex.split(cmd)
+
+
+def join_args(args: T.Iterable[str]) -> str:
+ return ' '.join([quote_arg(x) for x in args])
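+
+# Illustrative round-trip sketch for the Windows rules above (POSIX relies on
+# shlex instead):
+#   quote_arg(r'a b\"c') -> r'"a b\\\"c"'
+#   split_args(r'"a b\\\"c"') -> [r'a b\"c']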
+
+
+def do_replacement(regex: T.Pattern[str], line: str, variable_format: str,
+ confdata: 'ConfigurationData') -> T.Tuple[str, T.Set[str]]:
+ missing_variables = set() # type: T.Set[str]
+ if variable_format == 'cmake':
+ start_tag = '${'
+ backslash_tag = '\\${'
+ else:
+ assert variable_format in ['meson', 'cmake@']
+ start_tag = '@'
+ backslash_tag = '\\@'
+
+ def variable_replace(match: T.Match[str]) -> str:
+ # Pairs of escape characters before '@' or '\@'
+ if match.group(0).endswith('\\'):
+ num_escapes = match.end(0) - match.start(0)
+ return '\\' * (num_escapes // 2)
+ # Single escape character and '@'
+ elif match.group(0) == backslash_tag:
+ return start_tag
+ # Template variable to be replaced
+ else:
+ varname = match.group(1)
+ var_str = ''
+ if varname in confdata:
+ (var, desc) = confdata.get(varname)
+ if isinstance(var, str):
+ var_str = var
+ elif isinstance(var, int):
+ var_str = str(var)
+ else:
+ msg = f'Tried to replace variable {varname!r} value with ' \
+ f'something other than a string or int: {var!r}'
+ raise MesonException(msg)
+ else:
+ missing_variables.add(varname)
+ return var_str
+ return re.sub(regex, variable_replace, line), missing_variables
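+
+# Illustrative sketch ('meson' and 'cmake@' use @VAR@, 'cmake' uses ${VAR}):
+# with confdata mapping FOO -> 'bar', the line 'x = @FOO@ \@LIT@ @MISSING@'
+# becomes 'x = bar @LIT@ ' (a missing variable is replaced by the empty
+# string) and {'MISSING'} is returned as the missing-variables set.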
+
+def do_define(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData', variable_format: str) -> str:
+ def get_cmake_define(line: str, confdata: 'ConfigurationData') -> str:
+ arr = line.split()
+ define_value = []
+ for token in arr[2:]:
+ try:
+ (v, desc) = confdata.get(token)
+ define_value += [str(v)]
+ except KeyError:
+ define_value += [token]
+ return ' '.join(define_value)
+
+ arr = line.split()
+ if variable_format == 'meson' and len(arr) != 2:
+ raise MesonException('#mesondefine does not contain exactly two tokens: %s' % line.strip())
+
+ varname = arr[1]
+ try:
+ (v, desc) = confdata.get(varname)
+ except KeyError:
+ return '/* #undef %s */\n' % varname
+ if isinstance(v, bool):
+ if v:
+ return '#define %s\n' % varname
+ else:
+ return '#undef %s\n' % varname
+ elif isinstance(v, int):
+ return '#define %s %d\n' % (varname, v)
+ elif isinstance(v, str):
+ if variable_format == 'meson':
+ result = v
+ else:
+ result = get_cmake_define(line, confdata)
+ result = f'#define {varname} {result}\n'
+ (result, missing_variable) = do_replacement(regex, result, variable_format, confdata)
+ return result
+ else:
+ raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
+
+def get_variable_regex(variable_format: str = 'meson') -> T.Pattern[str]:
+ # Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
+ # Also allow escaping '@' with '\@'
+ if variable_format in ['meson', 'cmake@']:
+ regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
+ elif variable_format == 'cmake':
+ regex = re.compile(r'(?:\\\\)+(?=\\?\$)|\\\${|\${([-a-zA-Z0-9_]+)}')
+ else:
+ raise MesonException(f'Format "{variable_format}" not handled')
+ return regex
+
+def do_conf_str(src: str, data: T.List[str], confdata: 'ConfigurationData', variable_format: str,
+ encoding: str = 'utf-8') -> T.Tuple[T.List[str], T.Set[str], bool]:
+ def line_is_valid(line: str, variable_format: str) -> bool:
+ if variable_format == 'meson':
+ if '#cmakedefine' in line:
+ return False
+ else:  # cmake format
+ if '#mesondefine' in line:
+ return False
+ return True
+
+ regex = get_variable_regex(variable_format)
+
+ search_token = '#mesondefine'
+ if variable_format != 'meson':
+ search_token = '#cmakedefine'
+
+ result = []
+ missing_variables = set()
+ # Detect when the configuration data is empty and no tokens were found
+ # during substitution so we can warn the user to use the `copy:` kwarg.
+ confdata_useless = not confdata.keys()
+ for line in data:
+ if line.startswith(search_token):
+ confdata_useless = False
+ line = do_define(regex, line, confdata, variable_format)
+ else:
+ if not line_is_valid(line, variable_format):
+ raise MesonException(f'Format error in {src}: saw "{line.strip()}" when format set to "{variable_format}"')
+ line, missing = do_replacement(regex, line, variable_format, confdata)
+ missing_variables.update(missing)
+ if missing:
+ confdata_useless = False
+ result.append(line)
+
+ return result, missing_variables, confdata_useless
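+
+# Illustrative sketch (confdata shown informally as name -> (value, desc)):
+# with {'HAVE_FOO': (True, ''), 'BAR': ('baz', '')} and format 'meson',
+#   do_conf_str('t', ['#mesondefine HAVE_FOO\n', 'a = @BAR@\n'], confdata, 'meson')
+#       -> (['#define HAVE_FOO\n', 'a = baz\n'], set(), False)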
+
+def do_conf_file(src: str, dst: str, confdata: 'ConfigurationData', variable_format: str,
+ encoding: str = 'utf-8') -> T.Tuple[T.Set[str], bool]:
+ try:
+ with open(src, encoding=encoding, newline='') as f:
+ data = f.readlines()
+ except Exception as e:
+ raise MesonException(f'Could not read input file {src}: {e!s}')
+
+ (result, missing_variables, confdata_useless) = do_conf_str(src, data, confdata, variable_format, encoding)
+ dst_tmp = dst + '~'
+ try:
+ with open(dst_tmp, 'w', encoding=encoding, newline='') as f:
+ f.writelines(result)
+ except Exception as e:
+ raise MesonException(f'Could not write output file {dst}: {e!s}')
+ shutil.copymode(src, dst_tmp)
+ replace_if_different(dst, dst_tmp)
+ return missing_variables, confdata_useless
+
+CONF_C_PRELUDE = '''/*
+ * Autogenerated by the Meson build system.
+ * Do not edit, your changes will be lost.
+ */
+
+#pragma once
+
+'''
+
+CONF_NASM_PRELUDE = '''; Autogenerated by the Meson build system.
+; Do not edit, your changes will be lost.
+
+'''
+
+def dump_conf_header(ofilename: str, cdata: 'ConfigurationData', output_format: str) -> None:
+ if output_format == 'c':
+ prelude = CONF_C_PRELUDE
+ prefix = '#'
+ elif output_format == 'nasm':
+ prelude = CONF_NASM_PRELUDE
+ prefix = '%'
+ else:
+ raise MesonException(f'Unknown configuration file output format: {output_format}')
+
+ ofilename_tmp = ofilename + '~'
+ with open(ofilename_tmp, 'w', encoding='utf-8') as ofile:
+ ofile.write(prelude)
+ for k in sorted(cdata.keys()):
+ (v, desc) = cdata.get(k)
+ if desc:
+ if output_format == 'c':
+ ofile.write('/* %s */\n' % desc)
+ elif output_format == 'nasm':
+ for line in desc.split('\n'):
+ ofile.write('; %s\n' % line)
+ if isinstance(v, bool):
+ if v:
+ ofile.write(f'{prefix}define {k}\n\n')
+ else:
+ ofile.write(f'{prefix}undef {k}\n\n')
+ elif isinstance(v, (int, str)):
+ ofile.write(f'{prefix}define {k} {v}\n\n')
+ else:
+ raise MesonException('Unknown data type in configuration file entry: ' + k)
+ replace_if_different(ofilename, ofilename_tmp)
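+
+# Illustrative sketch of the 'c' output (after the prelude) for cdata holding
+# HAVE_FOO -> (True, 'has foo') and VERSION -> ('"1.0"', None):
+#
+#   /* has foo */
+#   #define HAVE_FOO
+#
+#   #define VERSION "1.0"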
+
+
+def replace_if_different(dst: str, dst_tmp: str) -> None:
+ # If contents are identical, don't touch the file to prevent
+ # unnecessary rebuilds.
+ different = True
+ try:
+ with open(dst, 'rb') as f1, open(dst_tmp, 'rb') as f2:
+ if f1.read() == f2.read():
+ different = False
+ except FileNotFoundError:
+ pass
+ if different:
+ os.replace(dst_tmp, dst)
+ else:
+ os.unlink(dst_tmp)
+
+
+
+def listify(item: T.Any, flatten: bool = True) -> T.List[T.Any]:
+ '''
+ Returns a list with all args embedded in a list if they are not a list.
+ This function preserves order.
+ @flatten: Convert lists of lists to a flat list
+ '''
+ if not isinstance(item, list):
+ return [item]
+ result = [] # type: T.List[T.Any]
+ for i in item:
+ if flatten and isinstance(i, list):
+ result += listify(i, flatten=True)
+ else:
+ result.append(i)
+ return result
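+
+# Illustrative sketch: listify('a') -> ['a'];
+# listify(['a', ['b', ['c']]]) -> ['a', 'b', 'c']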
+
+
+def extract_as_list(dict_object: T.Dict[_T, _U], key: _T, pop: bool = False) -> T.List[_U]:
+ '''
+ Extracts all values from given dict_object and listifies them.
+ '''
+ fetch = dict_object.get
+ if pop:
+ fetch = dict_object.pop
+ # Listify the fetched value, flattening any nested lists
+ return listify(fetch(key, []), flatten=True)
+
+
+def typeslistify(item: 'T.Union[_T, T.Sequence[_T]]',
+ types: 'T.Union[T.Type[_T], T.Tuple[T.Type[_T]]]') -> T.List[_T]:
+ '''
+ Ensure that type(@item) is one of @types or a
+ list of items all of which are of type @types
+ '''
+ if isinstance(item, types):
+ item = T.cast(T.List[_T], [item])
+ if not isinstance(item, list):
+ raise MesonException('Item must be a list or one of {!r}, not {!r}'.format(types, type(item)))
+ for i in item:
+ if i is not None and not isinstance(i, types):
+ raise MesonException('List item must be one of {!r}, not {!r}'.format(types, type(i)))
+ return item
+
+
+def stringlistify(item: T.Union[T.Any, T.Sequence[T.Any]]) -> T.List[str]:
+ return typeslistify(item, str)
+
+
+def expand_arguments(args: T.Iterable[str]) -> T.Optional[T.List[str]]:
+ expanded_args = [] # type: T.List[str]
+ for arg in args:
+ if not arg.startswith('@'):
+ expanded_args.append(arg)
+ continue
+
+ args_file = arg[1:]
+ try:
+ with open(args_file, encoding='utf-8') as f:
+ expanded_args += f.read().split()
+ except Exception as e:
+ mlog.error('Expanding command line arguments:', args_file, 'not found')
+ mlog.exception(e)
+ return None
+ return expanded_args
+
+
+def partition(pred: T.Callable[[_T], object], iterable: T.Iterable[_T]) -> T.Tuple[T.Iterator[_T], T.Iterator[_T]]:
+ """Use a predicate to partition entries into false entries and true
+ entries.
+
+ >>> x, y = partition(is_odd, range(10))
+ >>> (list(x), list(y))
+ ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
+ """
+ t1, t2 = tee(iterable)
+ return filterfalse(pred, t1), filter(pred, t2)
+
+
+def Popen_safe(args: T.List[str], write: T.Optional[str] = None,
+ stdout: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
+ stderr: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
+ **kwargs: T.Any) -> T.Tuple[subprocess.Popen, str, str]:
+ import locale
+ encoding = locale.getpreferredencoding()
+ # Redirect stdin to DEVNULL otherwise the command run by us here might mess
+ # up the console and ANSI colors will stop working on Windows.
+ if 'stdin' not in kwargs:
+ kwargs['stdin'] = subprocess.DEVNULL
+ if not sys.stdout.encoding or encoding.upper() != 'UTF-8':
+ p, o, e = Popen_safe_legacy(args, write=write, stdout=stdout, stderr=stderr, **kwargs)
+ else:
+ p = subprocess.Popen(args, universal_newlines=True, close_fds=False,
+ stdout=stdout, stderr=stderr, **kwargs)
+ o, e = p.communicate(write)
+ # Sometimes the command that we run will call another command which will be
+ # without the above stdin workaround, so set the console mode again just in
+ # case.
+ mlog.setup_console()
+ return p, o, e
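+
+# Illustrative sketch: p, out, err = Popen_safe(['git', '--version']) returns
+# the Popen object plus decoded str output with '\r\n' normalized to '\n';
+# stdin is redirected to DEVNULL unless the caller overrides it.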
+
+
+def Popen_safe_legacy(args: T.List[str], write: T.Optional[str] = None,
+ stdout: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
+ stderr: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
+ **kwargs: T.Any) -> T.Tuple[subprocess.Popen, str, str]:
+ p = subprocess.Popen(args, universal_newlines=False, close_fds=False,
+ stdout=stdout, stderr=stderr, **kwargs)
+ input_ = None # type: T.Optional[bytes]
+ if write is not None:
+ input_ = write.encode('utf-8')
+ o, e = p.communicate(input_)
+ if o is not None:
+ if sys.stdout.encoding:
+ o = o.decode(encoding=sys.stdout.encoding, errors='replace').replace('\r\n', '\n')
+ else:
+ o = o.decode(errors='replace').replace('\r\n', '\n')
+ if e is not None:
+ if sys.stderr.encoding:
+ e = e.decode(encoding=sys.stderr.encoding, errors='replace').replace('\r\n', '\n')
+ else:
+ e = e.decode(errors='replace').replace('\r\n', '\n')
+ return p, o, e
+
+
+def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T.Optional[str]:
+ '''
+ Takes each regular expression in @regexiter and tries to search for it in
+ every item in @initer. If there is a match, returns that match.
+ Else returns False.
+ '''
+ for regex in regexiter:
+ for ii in initer:
+ if not isinstance(ii, str):
+ continue
+ match = re.search(regex, ii)
+ if match:
+ return match.group()
+ return None
+
+
+def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, str]) -> None:
+ # Error checking
+ inregex = ['@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@'] # type: T.List[str]
+ outregex = ['@OUTPUT([0-9]+)?@', '@OUTDIR@'] # type: T.List[str]
+ if '@INPUT@' not in values:
+ # Error out if any input-derived templates are present in the command
+ match = iter_regexin_iter(inregex, command)
+ if match:
+ raise MesonException(f'Command cannot have {match!r}, since no input files were specified')
+ else:
+ if len(values['@INPUT@']) > 1:
+ # Error out if @PLAINNAME@ or @BASENAME@ is present in the command
+ match = iter_regexin_iter(inregex[1:], command)
+ if match:
+ raise MesonException(f'Command cannot have {match!r} when there is '
+ 'more than one input file')
+ # Error out if an invalid @INPUTnn@ template was specified
+ for each in command:
+ if not isinstance(each, str):
+ continue
+ match2 = re.search(inregex[0], each)
+ if match2 and match2.group() not in values:
+ m = 'Command cannot have {!r} since there are only {!r} inputs'
+ raise MesonException(m.format(match2.group(), len(values['@INPUT@'])))
+ if '@OUTPUT@' not in values:
+ # Error out if any output-derived templates are present in the command
+ match = iter_regexin_iter(outregex, command)
+ if match:
+ m = 'Command cannot have {!r} since there are no outputs'
+ raise MesonException(m.format(match))
+ else:
+ # Error out if an invalid @OUTPUTnn@ template was specified
+ for each in command:
+ if not isinstance(each, str):
+ continue
+ match2 = re.search(outregex[0], each)
+ if match2 and match2.group() not in values:
+ m = 'Command cannot have {!r} since there are only {!r} outputs'
+ raise MesonException(m.format(match2.group(), len(values['@OUTPUT@'])))
+
+
+def substitute_values(command: T.List[str], values: T.Dict[str, str]) -> T.List[str]:
+ '''
+ Substitute the template strings in the @values dict into the list of
+ strings @command and return a new list. For a full list of the templates,
+ see get_filenames_templates_dict()
+
+ If multiple inputs/outputs are given in the @values dictionary, we
+ substitute @INPUT@ and @OUTPUT@ only if they are the entire string, not
+ just a part of it, and in that case we substitute *all* of them.
+ '''
+ # Error checking
+ _substitute_values_check_errors(command, values)
+ # Substitution
+ outcmd = [] # type: T.List[str]
+ rx_keys = [re.escape(key) for key in values if key not in ('@INPUT@', '@OUTPUT@')]
+ value_rx = re.compile('|'.join(rx_keys)) if rx_keys else None
+ for vv in command:
+ if not isinstance(vv, str):
+ outcmd.append(vv)
+ elif '@INPUT@' in vv:
+ inputs = values['@INPUT@']
+ if vv == '@INPUT@':
+ outcmd += inputs
+ elif len(inputs) == 1:
+ outcmd.append(vv.replace('@INPUT@', inputs[0]))
+ else:
+ raise MesonException("Command has '@INPUT@' as part of a "
+ "string and more than one input file")
+ elif '@OUTPUT@' in vv:
+ outputs = values['@OUTPUT@']
+ if vv == '@OUTPUT@':
+ outcmd += outputs
+ elif len(outputs) == 1:
+ outcmd.append(vv.replace('@OUTPUT@', outputs[0]))
+ else:
+ raise MesonException("Command has '@OUTPUT@' as part of a "
+ "string and more than one output file")
+ # Append values that are exactly a template string.
+ # This is faster than a string replace.
+ elif vv in values:
+ outcmd.append(values[vv])
+ # Substitute everything else with replacement
+ elif value_rx:
+ outcmd.append(value_rx.sub(lambda m: values[m.group(0)], vv))
+ else:
+ outcmd.append(vv)
+ return outcmd
+
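+# A small illustration with assumed values (see get_filenames_templates_dict()
+# below for how such a dict is normally built):
+#   values = {'@INPUT@': ['a.c'], '@INPUT0@': 'a.c', '@PLAINNAME@': 'a.c',
+#             '@BASENAME@': 'a', '@OUTPUT@': ['a.o'], '@OUTPUT0@': 'a.o',
+#             '@OUTDIR@': '.'}
+#   substitute_values(['cc', '-c', '@INPUT@', '-o', '@OUTPUT@'], values)
+#   -> ['cc', '-c', 'a.c', '-o', 'a.o']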
+
+def get_filenames_templates_dict(inputs: T.List[str], outputs: T.List[str]) -> T.Dict[str, T.Union[str, T.List[str]]]:
+ '''
+ Create a dictionary with template strings as keys and values as values for
+ the following templates:
+
+ @INPUT@ - the full path to one or more input files, from @inputs
+ @OUTPUT@ - the full path to one or more output files, from @outputs
+ @OUTDIR@ - the full path to the directory containing the output files
+
+ If there is only one input file, the following keys are also created:
+
+ @PLAINNAME@ - the filename of the input file
+ @BASENAME@ - the filename of the input file with the extension removed
+
+ If there is more than one input file, the following keys are also created:
+
+ @INPUT0@, @INPUT1@, ... one for each input file
+
+ If there is more than one output file, the following keys are also created:
+
+ @OUTPUT0@, @OUTPUT1@, ... one for each output file
+ '''
+ values = {} # type: T.Dict[str, T.Union[str, T.List[str]]]
+ # Gather values derived from the input
+ if inputs:
+ # We want to substitute all the inputs.
+ values['@INPUT@'] = inputs
+ for (ii, vv) in enumerate(inputs):
+ # Write out @INPUT0@, @INPUT1@, ...
+ values[f'@INPUT{ii}@'] = vv
+ if len(inputs) == 1:
+ # Just one value, substitute @PLAINNAME@ and @BASENAME@
+ values['@PLAINNAME@'] = plain = os.path.basename(inputs[0])
+ values['@BASENAME@'] = os.path.splitext(plain)[0]
+ if outputs:
+ # Gather values derived from the outputs, similar to above.
+ values['@OUTPUT@'] = outputs
+ for (ii, vv) in enumerate(outputs):
+ values[f'@OUTPUT{ii}@'] = vv
+ # Outdir should be the same for all outputs
+ values['@OUTDIR@'] = os.path.dirname(outputs[0])
+ # Many external programs fail on empty arguments.
+ if values['@OUTDIR@'] == '':
+ values['@OUTDIR@'] = '.'
+ return values
+
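+# Example (hypothetical file names):
+#   get_filenames_templates_dict(['src/f.c'], ['f.o'])
+#   -> {'@INPUT@': ['src/f.c'], '@INPUT0@': 'src/f.c', '@PLAINNAME@': 'f.c',
+#       '@BASENAME@': 'f', '@OUTPUT@': ['f.o'], '@OUTPUT0@': 'f.o', '@OUTDIR@': '.'}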
+
+def _make_tree_writable(topdir: str) -> None:
+ # Ensure all files and directories under topdir are writable
+ # (and readable) by owner.
+ for d, _, files in os.walk(topdir):
+ os.chmod(d, os.stat(d).st_mode | stat.S_IWRITE | stat.S_IREAD)
+ for fname in files:
+ fpath = os.path.join(d, fname)
+ if os.path.isfile(fpath):
+ os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
+
+
+def windows_proof_rmtree(f: str) -> None:
+    # On Windows, if anyone is holding a file open, you can't
+    # delete it. For example, an antivirus scanner might be
+    # scanning files you are trying to delete. The only way
+    # to fix this is to try again and again.
+ delays = [0.1, 0.1, 0.2, 0.2, 0.2, 0.5, 0.5, 1, 1, 1, 1, 2]
+ writable = False
+ for d in delays:
+ try:
+ # Start by making the tree writable.
+ if not writable:
+ _make_tree_writable(f)
+ writable = True
+ except PermissionError:
+ time.sleep(d)
+ continue
+ try:
+ shutil.rmtree(f)
+ return
+ except FileNotFoundError:
+ return
+ except OSError:
+ time.sleep(d)
+ # Try one last time and throw if it fails.
+ shutil.rmtree(f)
+
+
+def windows_proof_rm(fpath: str) -> None:
+ """Like windows_proof_rmtree, but for a single file."""
+ if os.path.isfile(fpath):
+ os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
+ delays = [0.1, 0.1, 0.2, 0.2, 0.2, 0.5, 0.5, 1, 1, 1, 1, 2]
+ for d in delays:
+ try:
+ os.unlink(fpath)
+ return
+ except FileNotFoundError:
+ return
+ except OSError:
+ time.sleep(d)
+ os.unlink(fpath)
+
+
+class TemporaryDirectoryWinProof(TemporaryDirectory):
+ """
+ Like TemporaryDirectory, but cleans things up using
+ windows_proof_rmtree()
+ """
+
+ def __exit__(self, exc: T.Any, value: T.Any, tb: T.Any) -> None:
+ try:
+ super().__exit__(exc, value, tb)
+ except OSError:
+ windows_proof_rmtree(self.name)
+
+ def cleanup(self) -> None:
+ try:
+ super().cleanup()
+ except OSError:
+ windows_proof_rmtree(self.name)
+
+
+def detect_subprojects(spdir_name: str, current_dir: str = '',
+ result: T.Optional[T.Dict[str, T.List[str]]] = None) -> T.Optional[T.Dict[str, T.List[str]]]:
+ if result is None:
+ result = {}
+ spdir = os.path.join(current_dir, spdir_name)
+ if not os.path.exists(spdir):
+ return result
+ for trial in glob(os.path.join(spdir, '*')):
+ basename = os.path.basename(trial)
+        # 'trial' is a full path, so compare the basename to skip the cache dir.
+        if basename == 'packagecache':
+ continue
+ append_this = True
+ if os.path.isdir(trial):
+ detect_subprojects(spdir_name, trial, result)
+ elif trial.endswith('.wrap') and os.path.isfile(trial):
+ basename = os.path.splitext(basename)[0]
+ else:
+ append_this = False
+ if append_this:
+ if basename in result:
+ result[basename].append(trial)
+ else:
+ result[basename] = [trial]
+ return result
+
+
+def substring_is_in_list(substr: str, strlist: T.List[str]) -> bool:
+ for s in strlist:
+ if substr in s:
+ return True
+ return False
+
+
+class OrderedSet(T.MutableSet[_T]):
+ """A set that preserves the order in which items are added, by first
+ insertion.
+ """
+ def __init__(self, iterable: T.Optional[T.Iterable[_T]] = None):
+ # typing.OrderedDict is new in 3.7.2, so we can't use that, but we can
+ # use MutableMapping, which is fine in this case.
+ self.__container = collections.OrderedDict() # type: T.MutableMapping[_T, None]
+ if iterable:
+ self.update(iterable)
+
+ def __contains__(self, value: object) -> bool:
+ return value in self.__container
+
+ def __iter__(self) -> T.Iterator[_T]:
+ return iter(self.__container.keys())
+
+ def __len__(self) -> int:
+ return len(self.__container)
+
+ def __repr__(self) -> str:
+ # Don't print 'OrderedSet("")' for an empty set.
+ if self.__container:
+ return 'OrderedSet("{}")'.format(
+ '", "'.join(repr(e) for e in self.__container.keys()))
+ return 'OrderedSet()'
+
+ def __reversed__(self) -> T.Iterator[_T]:
+        # Mypy complains that sets can't be reversed, which is true for
+        # unordered sets, but this is an ordered set, so reversed() makes sense.
+ return reversed(self.__container.keys()) # type: ignore
+
+ def add(self, value: _T) -> None:
+ self.__container[value] = None
+
+ def discard(self, value: _T) -> None:
+ if value in self.__container:
+ del self.__container[value]
+
+ def move_to_end(self, value: _T, last: bool = True) -> None:
+ # Mypy does not know about move_to_end, because it is not part of MutableMapping
+ self.__container.move_to_end(value, last) # type: ignore
+
+ def pop(self, last: bool = True) -> _T:
+ # Mypy does not know about the last argument, because it is not part of MutableMapping
+ item, _ = self.__container.popitem(last) # type: ignore
+ return item
+
+ def update(self, iterable: T.Iterable[_T]) -> None:
+ for item in iterable:
+ self.__container[item] = None
+
+ def difference(self, set_: T.Union[T.Set[_T], 'OrderedSet[_T]']) -> 'OrderedSet[_T]':
+ return type(self)(e for e in self if e not in set_)
+
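+# Example: an OrderedSet keeps first-insertion order and drops duplicates:
+#   list(OrderedSet(['b', 'a', 'b'])) -> ['b', 'a']
+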
+def relpath(path: str, start: str) -> str:
+ # On Windows a relative path can't be evaluated for paths on two different
+ # drives (i.e. c:\foo and f:\bar). The only thing left to do is to use the
+ # original absolute path.
+ try:
+ return os.path.relpath(path, start)
+ except (TypeError, ValueError):
+ return path
+
+def path_is_in_root(path: Path, root: Path, resolve: bool = False) -> bool:
+ # Check whether a path is within the root directory root
+ try:
+ if resolve:
+ path.resolve().relative_to(root.resolve())
+ else:
+ path.relative_to(root)
+ except ValueError:
+ return False
+ return True
+
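+# Example: path_is_in_root(Path('/a/b/c'), Path('/a')) -> True, while
+# path_is_in_root(Path('/x/y'), Path('/a')) -> False (relative_to() raises).
+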
+def relative_to_if_possible(path: Path, root: Path, resolve: bool = False) -> Path:
+ try:
+ if resolve:
+ return path.resolve().relative_to(root.resolve())
+ else:
+ return path.relative_to(root)
+ except ValueError:
+ return path
+
+class LibType(enum.IntEnum):
+
+ """Enumeration for library types."""
+
+ SHARED = 0
+ STATIC = 1
+ PREFER_SHARED = 2
+ PREFER_STATIC = 3
+
+
+class ProgressBarFallback: # lgtm [py/iter-returns-non-self]
+ '''
+ Fallback progress bar implementation when tqdm is not found
+
+ Since this class is not an actual iterator, but only provides a minimal
+ fallback, it is safe to ignore the 'Iterator does not return self from
+ __iter__ method' warning.
+ '''
+ def __init__(self, iterable: T.Optional[T.Iterable[str]] = None, total: T.Optional[int] = None,
+ bar_type: T.Optional[str] = None, desc: T.Optional[str] = None):
+ if iterable is not None:
+ self.iterable = iter(iterable)
+ return
+ self.total = total
+ self.done = 0
+ self.printed_dots = 0
+ if self.total and bar_type == 'download':
+ print('Download size:', self.total)
+ if desc:
+ print(f'{desc}: ', end='')
+
+ # Pretend to be an iterator when called as one and don't print any
+ # progress
+ def __iter__(self) -> T.Iterator[str]:
+ return self.iterable
+
+ def __next__(self) -> str:
+ return next(self.iterable)
+
+ def print_dot(self) -> None:
+ print('.', end='')
+ sys.stdout.flush()
+ self.printed_dots += 1
+
+ def update(self, progress: int) -> None:
+ self.done += progress
+ if not self.total:
+ # Just print one dot per call if we don't have a total length
+ self.print_dot()
+ return
+ ratio = int(self.done / self.total * 10)
+ while self.printed_dots < ratio:
+ self.print_dot()
+
+ def close(self) -> None:
+ print('')
+
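+# Example: with a known total, dots are printed proportionally:
+#   bar = ProgressBarFallback(total=100, bar_type='download')
+#   bar.update(50)  # prints 5 of the eventual 10 dots
+#   bar.close()
+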
+try:
+ from tqdm import tqdm
+except ImportError:
+ # ideally we would use a typing.Protocol here, but it's part of typing_extensions until 3.8
+ ProgressBar = ProgressBarFallback # type: T.Union[T.Type[ProgressBarFallback], T.Type[ProgressBarTqdm]]
+else:
+ class ProgressBarTqdm(tqdm):
+ def __init__(self, *args: T.Any, bar_type: T.Optional[str] = None, **kwargs: T.Any) -> None:
+ if bar_type == 'download':
+ kwargs.update({'unit': 'bytes', 'leave': True})
+ else:
+ kwargs.update({'leave': False})
+ kwargs['ncols'] = 100
+ super().__init__(*args, **kwargs)
+
+ ProgressBar = ProgressBarTqdm
+
+
+class RealPathAction(argparse.Action):
+ def __init__(self, option_strings: T.List[str], dest: str, default: str = '.', **kwargs: T.Any):
+ default = os.path.abspath(os.path.realpath(default))
+ super().__init__(option_strings, dest, nargs=None, default=default, **kwargs)
+
+ def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace,
+ values: T.Union[str, T.Sequence[T.Any], None], option_string: str = None) -> None:
+ assert isinstance(values, str)
+ setattr(namespace, self.dest, os.path.abspath(os.path.realpath(values)))
+
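+# Usage sketch (this mirrors how the action is used elsewhere in Meson):
+#   parser.add_argument('-C', dest='wd', action=RealPathAction,
+#                       help='directory to cd into before running')
+# guarantees that namespace.wd is an absolute path with symlinks resolved.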
+
+def get_wine_shortpath(winecmd: T.List[str], wine_paths: T.Sequence[str]) -> str:
+    """Get a short version of @wine_paths to avoid exceeding the WINEPATH
+    character limit.
+    """
+
+ wine_paths = list(OrderedSet(wine_paths))
+
+ getShortPathScript = '%s.bat' % str(uuid.uuid4()).lower()[:5]
+ with open(getShortPathScript, mode='w', encoding='utf-8') as f:
+ f.write("@ECHO OFF\nfor %%x in (%*) do (\n echo|set /p=;%~sx\n)\n")
+ f.flush()
+ try:
+ with open(os.devnull, 'w', encoding='utf-8') as stderr:
+ wine_path = subprocess.check_output(
+ winecmd +
+ ['cmd', '/C', getShortPathScript] + wine_paths,
+ stderr=stderr).decode('utf-8')
+ except subprocess.CalledProcessError as e:
+ print("Could not get short paths: %s" % e)
+ wine_path = ';'.join(wine_paths)
+ finally:
+ os.remove(getShortPathScript)
+ if len(wine_path) > 2048:
+ raise MesonException(
+            'WINEPATH size {} > 2048;'
+            ' this will cause random failures.'.format(
+ len(wine_path)))
+
+ return wine_path.strip(';')
+
+
+def run_once(func: T.Callable[..., _T]) -> T.Callable[..., _T]:
+ ret = [] # type: T.List[_T]
+
+ @wraps(func)
+ def wrapper(*args: T.Any, **kwargs: T.Any) -> _T:
+ if ret:
+ return ret[0]
+
+ val = func(*args, **kwargs)
+ ret.append(val)
+ return val
+
+ return wrapper
+
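+# Example: the first call's return value is cached and reused; note that the
+# cache ignores the arguments of later calls.
+#   @run_once
+#   def detect() -> int:
+#       print('probing')
+#       return 42
+#   detect(); detect()  # 'probing' is printed once; both calls return 42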
+
+class OptionProxy(T.Generic[_T]):
+ def __init__(self, value: _T, choices: T.Optional[T.List[str]] = None):
+ self.value = value
+ self.choices = choices
+
+ def set_value(self, v: _T) -> None:
+        # XXX: should this be an error?
+ self.value = v
+
+
+class OptionOverrideProxy(collections.abc.MutableMapping):
+
+ '''Mimic an option list but transparently override selected option
+ values.
+ '''
+
+ # TODO: the typing here could be made more explicit using a TypeDict from
+ # python 3.8 or typing_extensions
+
+ def __init__(self, overrides: T.Dict['OptionKey', T.Any], *options: 'KeyedOptionDictType'):
+ self.overrides = overrides.copy()
+ self.options: T.Dict['OptionKey', UserOption] = {}
+ for o in options:
+ self.options.update(o)
+
+ def __getitem__(self, key: 'OptionKey') -> T.Union['UserOption', OptionProxy]:
+ if key in self.options:
+ opt = self.options[key]
+ if key in self.overrides:
+ return OptionProxy(opt.validate_value(self.overrides[key]), getattr(opt, 'choices', None))
+ return opt
+ raise KeyError('Option not found', key)
+
+ def __setitem__(self, key: 'OptionKey', value: T.Union['UserOption', OptionProxy]) -> None:
+ self.overrides[key] = value.value
+
+ def __delitem__(self, key: 'OptionKey') -> None:
+ del self.overrides[key]
+
+ def __iter__(self) -> T.Iterator['OptionKey']:
+ return iter(self.options)
+
+ def __len__(self) -> int:
+ return len(self.options)
+
+ def copy(self) -> 'OptionOverrideProxy':
+ return OptionOverrideProxy(self.overrides.copy(), self.options.copy())
+
+
+class OptionType(enum.Enum):
+
+ """Enum used to specify what kind of argument a thing is."""
+
+ BUILTIN = 0
+ BASE = 1
+ COMPILER = 2
+ PROJECT = 3
+ BACKEND = 4
+
+# This is copied from coredata. There is no way to share this, because this
+# is used in the OptionKey constructor, and the coredata lists are
+# OptionKeys...
+_BUILTIN_NAMES = {
+ 'prefix',
+ 'bindir',
+ 'datadir',
+ 'includedir',
+ 'infodir',
+ 'libdir',
+ 'libexecdir',
+ 'localedir',
+ 'localstatedir',
+ 'mandir',
+ 'sbindir',
+ 'sharedstatedir',
+ 'sysconfdir',
+ 'auto_features',
+ 'backend',
+ 'buildtype',
+ 'debug',
+ 'default_library',
+ 'errorlogs',
+ 'install_umask',
+ 'layout',
+ 'optimization',
+ 'stdsplit',
+ 'strip',
+ 'unity',
+ 'unity_size',
+ 'warning_level',
+ 'werror',
+ 'wrap_mode',
+ 'force_fallback_for',
+ 'pkg_config_path',
+ 'cmake_prefix_path',
+}
+
+
+def _classify_argument(key: 'OptionKey') -> OptionType:
+ """Classify arguments into groups so we know which dict to assign them to."""
+
+ if key.name.startswith('b_'):
+ return OptionType.BASE
+ elif key.lang is not None:
+ return OptionType.COMPILER
+ elif key.name in _BUILTIN_NAMES:
+ return OptionType.BUILTIN
+ elif key.name.startswith('backend_'):
+ assert key.machine is MachineChoice.HOST, str(key)
+ return OptionType.BACKEND
+ else:
+ assert key.machine is MachineChoice.HOST, str(key)
+ return OptionType.PROJECT
+
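+# Examples of the classification (using the OptionKey class defined below):
+#   OptionKey('b_lto')           -> OptionType.BASE
+#   OptionKey('std', lang='cpp') -> OptionType.COMPILER
+#   OptionKey('prefix')          -> OptionType.BUILTIN
+#   OptionKey('my_feature')      -> OptionType.PROJECT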
+
+@total_ordering
+class OptionKey:
+
+ """Represents an option key in the various option dictionaries.
+
+ This provides a flexible, powerful way to map option names from their
+    external form (things like subproject:build.option) to something that is
+    internally easier to reason about and produce.
+ """
+
+ __slots__ = ['name', 'subproject', 'machine', 'lang', '_hash', 'type']
+
+ name: str
+ subproject: str
+ machine: MachineChoice
+ lang: T.Optional[str]
+ _hash: int
+ type: OptionType
+
+ def __init__(self, name: str, subproject: str = '',
+ machine: MachineChoice = MachineChoice.HOST,
+ lang: T.Optional[str] = None, _type: T.Optional[OptionType] = None):
+        # The _type option to the constructor is kinda private. We want to be
+        # able to save the state and avoid the lookup function when
+        # pickling/unpickling, but we need to be able to calculate it when
+        # constructing a new OptionKey.
+ object.__setattr__(self, 'name', name)
+ object.__setattr__(self, 'subproject', subproject)
+ object.__setattr__(self, 'machine', machine)
+ object.__setattr__(self, 'lang', lang)
+ object.__setattr__(self, '_hash', hash((name, subproject, machine, lang)))
+ if _type is None:
+ _type = _classify_argument(self)
+ object.__setattr__(self, 'type', _type)
+
+ def __setattr__(self, key: str, value: T.Any) -> None:
+ raise AttributeError('OptionKey instances do not support mutation.')
+
+ def __getstate__(self) -> T.Dict[str, T.Any]:
+ return {
+ 'name': self.name,
+ 'subproject': self.subproject,
+ 'machine': self.machine,
+ 'lang': self.lang,
+ '_type': self.type,
+ }
+
+ def __setstate__(self, state: T.Dict[str, T.Any]) -> None:
+ """De-serialize the state of a pickle.
+
+ This is very clever. __init__ is not a constructor, it's an
+ initializer, therefore it's safe to call more than once. We create a
+ state in the custom __getstate__ method, which is valid to pass
+ splatted to the initializer.
+ """
+ # Mypy doesn't like this, because it's so clever.
+ self.__init__(**state) # type: ignore
+
+ def __hash__(self) -> int:
+ return self._hash
+
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, OptionKey):
+ return (
+ self.name == other.name and
+ self.subproject == other.subproject and
+ self.machine is other.machine and
+ self.lang == other.lang)
+ return NotImplemented
+
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, OptionKey):
+            # Compare as tuples to get a proper lexicographic ordering;
+            # chaining the comparisons with 'and' does not define one, and
+            # lang may be None, so map it to '' for comparison.
+            return (self.subproject, self.machine, self.lang or '', self.name) < \
+                   (other.subproject, other.machine, other.lang or '', other.name)
+        return NotImplemented
+
+ def __str__(self) -> str:
+ out = self.name
+ if self.lang:
+ out = f'{self.lang}_{out}'
+ if self.machine is MachineChoice.BUILD:
+ out = f'build.{out}'
+ if self.subproject:
+ out = f'{self.subproject}:{out}'
+ return out
+
+ def __repr__(self) -> str:
+ return f'OptionKey({repr(self.name)}, {repr(self.subproject)}, {repr(self.machine)}, {repr(self.lang)})'
+
+ @classmethod
+ def from_string(cls, raw: str) -> 'OptionKey':
+        """Parse the raw command-line option name into an OptionKey.
+
+        This takes strings like `mysubproject:build.myoption` and creates an
+        OptionKey out of them.
+ """
+ try:
+ subproject, raw2 = raw.split(':')
+ except ValueError:
+ subproject, raw2 = '', raw
+
+ if raw2.startswith('build.'):
+ raw3 = raw2.split('.', 1)[1]
+ for_machine = MachineChoice.BUILD
+ else:
+ raw3 = raw2
+ for_machine = MachineChoice.HOST
+
+ from ..compilers import all_languages
+ if any(raw3.startswith(f'{l}_') for l in all_languages):
+ lang, opt = raw3.split('_', 1)
+ else:
+ lang, opt = None, raw3
+ assert ':' not in opt
+ assert 'build.' not in opt
+
+ return cls(opt, subproject, for_machine, lang)
+
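+    # For instance (assuming 'cpp' is a known language), parsing
+    # 'mysub:build.cpp_std' yields
+    # OptionKey('std', subproject='mysub', machine=MachineChoice.BUILD, lang='cpp'),
+    # and str() on that key round-trips back to 'mysub:build.cpp_std'.
+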
+ def evolve(self, name: T.Optional[str] = None, subproject: T.Optional[str] = None,
+ machine: T.Optional[MachineChoice] = None, lang: T.Optional[str] = '') -> 'OptionKey':
+        """Create a new copy of this key, but with altered members.
+
+ For example:
+        >>> a = OptionKey('foo', '', MachineChoice.HOST)
+        >>> b = OptionKey('foo', 'bar', MachineChoice.HOST)
+ >>> b == a.evolve(subproject='bar')
+ True
+ """
+ # We have to be a little clever with lang here, because lang is valid
+ # as None, for non-compiler options
+ return OptionKey(
+ name if name is not None else self.name,
+ subproject if subproject is not None else self.subproject,
+ machine if machine is not None else self.machine,
+ lang if lang != '' else self.lang,
+ )
+
+ def as_root(self) -> 'OptionKey':
+ """Convenience method for key.evolve(subproject='')."""
+ return self.evolve(subproject='')
+
+ def as_build(self) -> 'OptionKey':
+        """Convenience method for key.evolve(machine=MachineChoice.BUILD)."""
+ return self.evolve(machine=MachineChoice.BUILD)
+
+ def as_host(self) -> 'OptionKey':
+        """Convenience method for key.evolve(machine=MachineChoice.HOST)."""
+ return self.evolve(machine=MachineChoice.HOST)
+
+ def is_backend(self) -> bool:
+ """Convenience method to check if this is a backend option."""
+ return self.type is OptionType.BACKEND
+
+ def is_builtin(self) -> bool:
+ """Convenience method to check if this is a builtin option."""
+ return self.type is OptionType.BUILTIN
+
+ def is_compiler(self) -> bool:
+        """Convenience method to check if this is a compiler option."""
+ return self.type is OptionType.COMPILER
+
+ def is_project(self) -> bool:
+ """Convenience method to check if this is a project option."""
+ return self.type is OptionType.PROJECT
+
+ def is_base(self) -> bool:
+ """Convenience method to check if this is a base option."""
+ return self.type is OptionType.BASE
diff --git a/meson/mesonbuild/mesonlib/win32.py b/meson/mesonbuild/mesonlib/win32.py
new file mode 100644
index 000000000..bc0caec94
--- /dev/null
+++ b/meson/mesonbuild/mesonlib/win32.py
@@ -0,0 +1,39 @@
+# SPDX-license-identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Windows specific implementations of mesonlib functionality."""
+
+import msvcrt
+import typing as T
+
+from .universal import MesonException
+from .platform import BuildDirLock as BuildDirLockBase
+
+__all__ = ['BuildDirLock']
+
+class BuildDirLock(BuildDirLockBase):
+
+ def __enter__(self) -> None:
+ self.lockfile = open(self.lockfilename, 'w', encoding='utf-8')
+ try:
+ msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)
+ except (BlockingIOError, PermissionError):
+ self.lockfile.close()
+ raise MesonException('Some other Meson process is already using this build directory. Exiting.')
+
+ def __exit__(self, *args: T.Any) -> None:
+ msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)
+ self.lockfile.close()
diff --git a/meson/mesonbuild/mesonmain.py b/meson/mesonbuild/mesonmain.py
new file mode 100644
index 000000000..8b7c9c176
--- /dev/null
+++ b/meson/mesonbuild/mesonmain.py
@@ -0,0 +1,329 @@
+# Copyright 2012-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Work around some pathlib bugs...
+from . import _pathlib
+import sys
+sys.modules['pathlib'] = _pathlib
+
+import os.path
+import importlib
+import traceback
+import argparse
+import codecs
+import shutil
+
+from . import mesonlib
+from . import mlog
+from . import mconf, mdist, minit, minstall, mintro, msetup, mtest, rewriter, msubprojects, munstable_coredata, mcompile, mdevenv
+from .mesonlib import MesonException
+from .environment import detect_msys2_arch
+from .wrap import wraptool
+
+need_setup_vsenv = False
+
+bat_template = '''@ECHO OFF
+
+call "{}"
+
+ECHO {}
+SET
+'''
+
+# If on Windows and VS is installed but not set up in the environment,
+# set it to be runnable. In this way Meson can be directly invoked
+# from any shell, VS Code etc.
+def setup_vsenv() -> None:
+ import subprocess, json, pathlib
+ if not mesonlib.is_windows():
+ return
+ bat_placeholder = 'nananananananananananananananana'
+    # If an existing build toolchain already exists in PATH, do nothing.
+ if shutil.which('cc'):
+ return
+ if shutil.which('gcc'):
+ return
+ if shutil.which('clang'):
+ return
+ if shutil.which('clang-cl'):
+ return
+ if os.environ.get('OSTYPE', bat_placeholder) == 'cygwin':
+ return
+ if 'Visual Studio' in os.environ['PATH']:
+ return
+    # VSINSTALLDIR is set when running setvars from a Visual Studio installation
+ # Tested with Visual Studio 2012 and 2017
+ if 'VSINSTALLDIR' in os.environ:
+ return
+ # Check explicitly for cl when on Windows
+ if shutil.which('cl.exe'):
+ return
+
+ root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
+ bat_locator_bin = pathlib.Path(root, 'Microsoft Visual Studio/Installer/vswhere.exe')
+ if not bat_locator_bin.exists():
+ return
+ bat_json = subprocess.check_output(
+ [
+ str(bat_locator_bin),
+ '-latest',
+ '-prerelease',
+ '-requiresAny',
+ '-requires', 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64',
+ '-products', '*',
+ '-utf8',
+ '-format',
+ 'json'
+ ]
+ )
+ bat_info = json.loads(bat_json)
+ if not bat_info:
+        # VS installer installed but not VS itself, maybe?
+ return
+ print('Activating VS', bat_info[0]['catalog']['productDisplayVersion'])
+ bat_root = pathlib.Path(bat_info[0]['installationPath'])
+ bat_path = bat_root / 'VC/Auxiliary/Build/vcvars64.bat'
+ if not bat_path.exists():
+ return
+
+ bat_file = pathlib.Path.home() / 'vsdetect.bat'
+
+ bat_separator = '---SPLIT---'
+ bat_contents = bat_template.format(bat_path, bat_separator)
+ bat_file.write_text(bat_contents, encoding='utf-8')
+ try:
+ bat_output = subprocess.check_output(str(bat_file), universal_newlines=True)
+ finally:
+ bat_file.unlink()
+ bat_lines = bat_output.split('\n')
+ bat_separator_seen = False
+ for bat_line in bat_lines:
+ if bat_line == bat_separator:
+ bat_separator_seen = True
+ continue
+ if not bat_separator_seen:
+ continue
+ if not bat_line:
+ continue
+ k, v = bat_line.split('=', 1)
+ os.environ[k] = v
+ global need_setup_vsenv
+ need_setup_vsenv = True
+
+
+# Note: when adding arguments, please also add them to the completion
+# scripts in $MESONSRC/data/shell-completions/
+class CommandLineParser:
+ def __init__(self):
+ self.term_width = shutil.get_terminal_size().columns
+ self.formatter = lambda prog: argparse.HelpFormatter(prog, max_help_position=int(self.term_width / 2), width=self.term_width)
+
+ self.commands = {}
+ self.hidden_commands = []
+ self.parser = argparse.ArgumentParser(prog='meson', formatter_class=self.formatter)
+ self.subparsers = self.parser.add_subparsers(title='Commands', dest='command',
+ description='If no command is specified it defaults to setup command.')
+ self.add_command('setup', msetup.add_arguments, msetup.run,
+ help_msg='Configure the project')
+ self.add_command('configure', mconf.add_arguments, mconf.run,
+ help_msg='Change project options',)
+ self.add_command('dist', mdist.add_arguments, mdist.run,
+ help_msg='Generate release archive',)
+ self.add_command('install', minstall.add_arguments, minstall.run,
+ help_msg='Install the project')
+ self.add_command('introspect', mintro.add_arguments, mintro.run,
+ help_msg='Introspect project')
+ self.add_command('init', minit.add_arguments, minit.run,
+ help_msg='Create a new project')
+ self.add_command('test', mtest.add_arguments, mtest.run,
+ help_msg='Run tests')
+ self.add_command('wrap', wraptool.add_arguments, wraptool.run,
+ help_msg='Wrap tools')
+ self.add_command('subprojects', msubprojects.add_arguments, msubprojects.run,
+ help_msg='Manage subprojects')
+ self.add_command('help', self.add_help_arguments, self.run_help_command,
+ help_msg='Print help of a subcommand')
+ self.add_command('rewrite', lambda parser: rewriter.add_arguments(parser, self.formatter), rewriter.run,
+ help_msg='Modify the project definition')
+ self.add_command('compile', mcompile.add_arguments, mcompile.run,
+ help_msg='Build the project')
+ self.add_command('devenv', mdevenv.add_arguments, mdevenv.run,
+ help_msg='Run commands in developer environment')
+
+ # Hidden commands
+ self.add_command('runpython', self.add_runpython_arguments, self.run_runpython_command,
+ help_msg=argparse.SUPPRESS)
+ self.add_command('unstable-coredata', munstable_coredata.add_arguments, munstable_coredata.run,
+ help_msg=argparse.SUPPRESS)
+
+ def add_command(self, name, add_arguments_func, run_func, help_msg, aliases=None):
+ aliases = aliases or []
+ # FIXME: Cannot have hidden subparser:
+ # https://bugs.python.org/issue22848
+ if help_msg == argparse.SUPPRESS:
+ p = argparse.ArgumentParser(prog='meson ' + name, formatter_class=self.formatter)
+ self.hidden_commands.append(name)
+ else:
+ p = self.subparsers.add_parser(name, help=help_msg, aliases=aliases, formatter_class=self.formatter)
+ add_arguments_func(p)
+ p.set_defaults(run_func=run_func)
+ for i in [name] + aliases:
+ self.commands[i] = p
+
+ def add_runpython_arguments(self, parser):
+ parser.add_argument('-c', action='store_true', dest='eval_arg', default=False)
+ parser.add_argument('script_file')
+ parser.add_argument('script_args', nargs=argparse.REMAINDER)
+
+ def run_runpython_command(self, options):
+ import runpy
+ if options.eval_arg:
+ exec(options.script_file)
+ else:
+ sys.argv[1:] = options.script_args
+ sys.path.insert(0, os.path.dirname(options.script_file))
+ runpy.run_path(options.script_file, run_name='__main__')
+ return 0
+
+ def add_help_arguments(self, parser):
+ parser.add_argument('command', nargs='?')
+
+ def run_help_command(self, options):
+ if options.command:
+ self.commands[options.command].print_help()
+ else:
+ self.parser.print_help()
+ return 0
+
+ def run(self, args):
+ # If first arg is not a known command, assume user wants to run the setup
+ # command.
+ known_commands = list(self.commands.keys()) + ['-h', '--help']
+ if not args or args[0] not in known_commands:
+ args = ['setup'] + args
+
+ # Hidden commands have their own parser instead of using the global one
+ if args[0] in self.hidden_commands:
+ command = args[0]
+ parser = self.commands[command]
+ args = args[1:]
+ else:
+ parser = self.parser
+
+ args = mesonlib.expand_arguments(args)
+ options = parser.parse_args(args)
+
+ try:
+ return options.run_func(options)
+ except MesonException as e:
+ mlog.exception(e)
+ logfile = mlog.shutdown()
+ if logfile is not None:
+ mlog.log("\nA full log can be found at", mlog.bold(logfile))
+ if os.environ.get('MESON_FORCE_BACKTRACE'):
+ raise
+ return 1
+ except Exception:
+ if os.environ.get('MESON_FORCE_BACKTRACE'):
+ raise
+ traceback.print_exc()
+ return 2
+ finally:
+ mlog.shutdown()
+
+def run_script_command(script_name, script_args):
+    # Map script name to module name for those that don't match
+ script_map = {'exe': 'meson_exe',
+ 'install': 'meson_install',
+ 'delsuffix': 'delwithsuffix',
+ 'gtkdoc': 'gtkdochelper',
+ 'hotdoc': 'hotdochelper',
+ 'regencheck': 'regen_checker'}
+ module_name = script_map.get(script_name, script_name)
+
+ try:
+ module = importlib.import_module('mesonbuild.scripts.' + module_name)
+ except ModuleNotFoundError as e:
+ mlog.exception(e)
+ return 1
+
+ try:
+ return module.run(script_args)
+ except MesonException as e:
+ mlog.error(f'Error in {script_name} helper script:')
+ mlog.exception(e)
+ return 1
+
+def ensure_stdout_accepts_unicode():
+ if sys.stdout.encoding and not sys.stdout.encoding.upper().startswith('UTF-'):
+ if sys.version_info >= (3, 7):
+ sys.stdout.reconfigure(errors='surrogateescape')
+ else:
+ sys.stdout = codecs.getwriter('utf-8')(sys.stdout.detach(),
+ errors='surrogateescape')
+ sys.stdout.encoding = 'UTF-8'
+ if not hasattr(sys.stdout, 'buffer'):
+ sys.stdout.buffer = sys.stdout.raw if hasattr(sys.stdout, 'raw') else sys.stdout
+
+def run(original_args, mainfile):
+ if sys.version_info < (3, 6):
+        print('Meson works correctly only with Python 3.6+.')
+        print(f'You have Python {sys.version}.')
+        print('Please update your environment.')
+ return 1
+
+ # Meson gets confused if stdout can't output Unicode, if the
+ # locale isn't Unicode, just force stdout to accept it. This tries
+ # to emulate enough of PEP 540 to work elsewhere.
+ ensure_stdout_accepts_unicode()
+
+ # https://github.com/mesonbuild/meson/issues/3653
+ if sys.platform.lower() == 'msys':
+ mlog.error('This python3 seems to be msys/python on MSYS2 Windows, which is known to have path semantics incompatible with Meson')
+ msys2_arch = detect_msys2_arch()
+ if msys2_arch:
+ mlog.error('Please install and use mingw-w64-i686-python3 and/or mingw-w64-x86_64-python3 with Pacman')
+ else:
+ mlog.error('Please download and use Python as detailed at: https://mesonbuild.com/Getting-meson.html')
+ return 2
+
+ # Set the meson command that will be used to run scripts and so on
+ mesonlib.set_meson_command(mainfile)
+
+ args = original_args[:]
+
+ # Special handling of internal commands called from backends, they don't
+ # need to go through argparse.
+ if len(args) >= 2 and args[0] == '--internal':
+ if args[1] == 'regenerate':
+ # Rewrite "meson --internal regenerate" command line to
+ # "meson --reconfigure"
+ args = ['--reconfigure'] + args[2:]
+ else:
+ return run_script_command(args[1], args[2:])
+
+ return CommandLineParser().run(args)
+
+def main():
+ setup_vsenv()
+ # Always resolve the command path so Ninja can find it for regen, tests, etc.
+ if 'meson.exe' in sys.executable:
+ assert(os.path.isabs(sys.executable))
+ launcher = sys.executable
+ else:
+ launcher = os.path.realpath(sys.argv[0])
+ return run(sys.argv[1:], launcher)
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/meson/mesonbuild/minit.py b/meson/mesonbuild/minit.py
new file mode 100644
index 000000000..124e6c671
--- /dev/null
+++ b/meson/mesonbuild/minit.py
@@ -0,0 +1,186 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Code that creates simple startup projects."""
+
+from pathlib import Path
+from enum import Enum
+import subprocess
+import shutil
+import sys
+import os
+import re
+from glob import glob
+from mesonbuild import mesonlib
+from mesonbuild.environment import detect_ninja
+from mesonbuild.templates.samplefactory import sameple_generator
+import typing as T
+
+if T.TYPE_CHECKING:
+ import argparse
+
+'''
+We currently have only one Meson template.
+'''
+from mesonbuild.templates.mesontemplates import create_meson_build
+
+FORTRAN_SUFFIXES = {'.f', '.for', '.F', '.f90', '.F90'}
+LANG_SUFFIXES = {'.c', '.cc', '.cpp', '.cs', '.cu', '.d', '.m', '.mm', '.rs', '.java'} | FORTRAN_SUFFIXES
+LANG_SUPPORTED = {'c', 'cpp', 'cs', 'cuda', 'd', 'fortran', 'java', 'rust', 'objc', 'objcpp'}
+
+DEFAULT_PROJECT = 'executable'
+DEFAULT_VERSION = '0.1'
+class DEFAULT_TYPES(Enum):
+ EXE = 'executable'
+ LIB = 'library'
+
+INFO_MESSAGE = '''Sample project created. To build it run the
+following commands:
+
+meson setup builddir
+meson compile -C builddir
+'''
+
+
+def create_sample(options: 'argparse.Namespace') -> None:
+    '''
+    Based on the arguments passed, check for a matching language, then for
+    the project type, and create a new Meson sample project.
+    '''
+ sample_gen = sameple_generator(options)
+ if options.type == DEFAULT_TYPES['EXE'].value:
+ sample_gen.create_executable()
+ elif options.type == DEFAULT_TYPES['LIB'].value:
+ sample_gen.create_library()
+ else:
+ raise RuntimeError('Unreachable code')
+ print(INFO_MESSAGE)
+
+def autodetect_options(options: 'argparse.Namespace', sample: bool = False) -> None:
+    '''
+    Here we autodetect options for arguments not passed in, so the user
+    doesn't have to think about them.
+    '''
+ if not options.name:
+ options.name = Path().resolve().stem
+ if not re.match('[a-zA-Z_][a-zA-Z0-9]*', options.name) and sample:
+ raise SystemExit('Name of current directory "{}" is not usable as a sample project name.\n'
+ 'Specify a project name with --name.'.format(options.name))
+ print('Using "{}" (name of current directory) as project name.'
+ .format(options.name))
+ if not options.executable:
+ options.executable = options.name
+ print('Using "{}" (project name) as name of executable to build.'
+ .format(options.executable))
+ if sample:
+ # The rest of the autodetection is not applicable to generating sample projects.
+ return
+ if not options.srcfiles:
+ srcfiles = []
+ for f in (f for f in Path().iterdir() if f.is_file()):
+ if f.suffix in LANG_SUFFIXES:
+ srcfiles.append(f)
+ if not srcfiles:
+ raise SystemExit('No recognizable source files found.\n'
+ 'Run meson init in an empty directory to create a sample project.')
+ options.srcfiles = srcfiles
+ print("Detected source files: " + ' '.join(map(str, srcfiles)))
+ options.srcfiles = [Path(f) for f in options.srcfiles]
+ if not options.language:
+ for f in options.srcfiles:
+ if f.suffix == '.c':
+ options.language = 'c'
+ break
+ if f.suffix in ('.cc', '.cpp'):
+ options.language = 'cpp'
+ break
+ if f.suffix == '.cs':
+ options.language = 'cs'
+ break
+ if f.suffix == '.cu':
+ options.language = 'cuda'
+ break
+ if f.suffix == '.d':
+ options.language = 'd'
+ break
+ if f.suffix in FORTRAN_SUFFIXES:
+ options.language = 'fortran'
+ break
+ if f.suffix == '.rs':
+ options.language = 'rust'
+ break
+ if f.suffix == '.m':
+ options.language = 'objc'
+ break
+ if f.suffix == '.mm':
+ options.language = 'objcpp'
+ break
+ if f.suffix == '.java':
+ options.language = 'java'
+ break
+ if not options.language:
+ raise SystemExit("Can't autodetect language, please specify it with -l.")
+ print("Detected language: " + options.language)
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+    '''
+    Here we add the arguments that the user can pass when creating a new
+    Meson project.
+    '''
+ parser.add_argument("srcfiles", metavar="sourcefile", nargs="*", help="source files. default: all recognized files in current directory")
+ parser.add_argument('-C', dest='wd', action=mesonlib.RealPathAction,
+ help='directory to cd into before running')
+ parser.add_argument("-n", "--name", help="project name. default: name of current directory")
+ parser.add_argument("-e", "--executable", help="executable name. default: project name")
+ parser.add_argument("-d", "--deps", help="dependencies, comma-separated")
+ parser.add_argument("-l", "--language", choices=sorted(LANG_SUPPORTED), help="project language. default: autodetected based on source files")
+ parser.add_argument("-b", "--build", action='store_true', help="build after generation")
+ parser.add_argument("--builddir", default='build', help="directory for build")
+ parser.add_argument("-f", "--force", action="store_true", help="force overwrite of existing files and directories.")
+ parser.add_argument('--type', default=DEFAULT_PROJECT, choices=('executable', 'library'), help=f"project type. default: {DEFAULT_PROJECT} based project")
+ parser.add_argument('--version', default=DEFAULT_VERSION, help=f"project version. default: {DEFAULT_VERSION}")
+
+def run(options: 'argparse.Namespace') -> int:
+ '''
+ Here we generate the new Meson sample project.
+ '''
+ if not Path(options.wd).exists():
+ sys.exit('Project source root directory not found. Run this command in source directory root.')
+ os.chdir(options.wd)
+
+ if not glob('*'):
+ autodetect_options(options, sample=True)
+ if not options.language:
+ print('Defaulting to generating a C language project.')
+ options.language = 'c'
+ create_sample(options)
+ else:
+ autodetect_options(options)
+ if Path('meson.build').is_file() and not options.force:
+ raise SystemExit('meson.build already exists. Use --force to overwrite.')
+ create_meson_build(options)
+ if options.build:
+ if Path(options.builddir).is_dir() and options.force:
+ print('Build directory already exists, deleting it.')
+ shutil.rmtree(options.builddir)
+ print('Building...')
+ cmd = mesonlib.get_meson_command() + [options.builddir]
+ ret = subprocess.run(cmd)
+ if ret.returncode:
+ raise SystemExit
+ cmd = detect_ninja() + ['-C', options.builddir]
+ ret = subprocess.run(cmd)
+ if ret.returncode:
+ raise SystemExit
+ return 0
diff --git a/meson/mesonbuild/minstall.py b/meson/mesonbuild/minstall.py
new file mode 100644
index 000000000..e753d94db
--- /dev/null
+++ b/meson/mesonbuild/minstall.py
@@ -0,0 +1,721 @@
+# Copyright 2013-2014 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from glob import glob
+from pathlib import Path
+import argparse
+import errno
+import os
+import pickle
+import shlex
+import shutil
+import subprocess
+import sys
+import typing as T
+
+from . import environment
+from .backend.backends import InstallData
+from .coredata import major_versions_differ, MesonVersionMismatchException
+from .coredata import version as coredata_version
+from .mesonlib import Popen_safe, RealPathAction, is_windows
+from .scripts import depfixer, destdir_join
+from .scripts.meson_exe import run_exe
+try:
+ from __main__ import __file__ as main_file
+except ImportError:
+ # Happens when running as meson.exe which is native Windows.
+ # This is only used for pkexec which is not, so this is fine.
+ main_file = None
+
+if T.TYPE_CHECKING:
+ from .mesonlib import FileMode
+
+ try:
+ from typing import Protocol
+    except ImportError:
+ from typing_extensions import Protocol # type: ignore
+
+ class ArgumentType(Protocol):
+ """Typing information for the object returned by argparse."""
+ no_rebuild: bool
+ only_changed: bool
+ profile: bool
+ quiet: bool
+ wd: str
+ destdir: str
+ dry_run: bool
+ skip_subprojects: str
+
+
+symlink_warning = '''Warning: trying to copy a symlink that points to a file. This will copy the file,
+but this will be changed in a future version of Meson to copy the symlink as is. Please update your
+build definitions so that it will not break when the change happens.'''
+
+selinux_updates: T.List[str] = []
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+ parser.add_argument('-C', dest='wd', action=RealPathAction,
+ help='directory to cd into before running')
+ parser.add_argument('--profile-self', action='store_true', dest='profile',
+ help=argparse.SUPPRESS)
+ parser.add_argument('--no-rebuild', default=False, action='store_true',
+ help='Do not rebuild before installing.')
+ parser.add_argument('--only-changed', default=False, action='store_true',
+ help='Only overwrite files that are older than the copied file.')
+ parser.add_argument('--quiet', default=False, action='store_true',
+ help='Do not print every file that was installed.')
+ parser.add_argument('--destdir', default=None,
+ help='Sets or overrides DESTDIR environment. (Since 0.57.0)')
+ parser.add_argument('--dry-run', '-n', action='store_true',
+ help='Doesn\'t actually install, but print logs. (Since 0.57.0)')
+ parser.add_argument('--skip-subprojects', nargs='?', const='*', default='',
+ help='Do not install files from given subprojects. (Since 0.58.0)')
+
+class DirMaker:
+ def __init__(self, lf: T.TextIO, makedirs: T.Callable[..., None]):
+ self.lf = lf
+ self.dirs: T.List[str] = []
+ self.makedirs_impl = makedirs
+
+ def makedirs(self, path: str, exist_ok: bool = False) -> None:
+ dirname = os.path.normpath(path)
+ dirs = []
+ while dirname != os.path.dirname(dirname):
+ if dirname in self.dirs:
+ # In dry-run mode the directory does not exist but we would have
+ # created it with all its parents otherwise.
+ break
+ if not os.path.exists(dirname):
+ dirs.append(dirname)
+ dirname = os.path.dirname(dirname)
+ self.makedirs_impl(path, exist_ok=exist_ok)
+
+ # store the directories in creation order, with the parent directory
+ # before the child directories. Future calls of makedir() will not
+ # create the parent directories, so the last element in the list is
+ # the last one to be created. That is the first one to be removed on
+ # __exit__
+ dirs.reverse()
+ self.dirs += dirs
+
+ def __enter__(self) -> 'DirMaker':
+ return self
+
+ def __exit__(self, exception_type: T.Type[Exception], value: T.Any, traceback: T.Any) -> None:
+ self.dirs.reverse()
+ for d in self.dirs:
+ append_to_log(self.lf, d)
+
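+# Usage sketch (as wired up in do_install() below): directories are recorded
+# in creation order and logged deepest-first on exit, so an uninstaller can
+# remove children before their parents:
+#   with DirMaker(logfile, os.makedirs) as dm:  # 'logfile' is a hypothetical T.TextIO
+#       dm.makedirs('/prefix/share/doc', exist_ok=True)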
+
+def is_executable(path: str, follow_symlinks: bool = False) -> bool:
+ '''Checks whether any of the "x" bits are set in the source file mode.'''
+ return bool(os.stat(path, follow_symlinks=follow_symlinks).st_mode & 0o111)
+
+
+def append_to_log(lf: T.TextIO, line: str) -> None:
+ lf.write(line)
+ if not line.endswith('\n'):
+ lf.write('\n')
+ lf.flush()
+
+
+def set_chown(path: str, user: T.Union[str, int, None] = None,
+ group: T.Union[str, int, None] = None,
+ dir_fd: T.Optional[int] = None, follow_symlinks: bool = True) -> None:
+    # shutil.chown will call os.chown without passing all the parameters,
+    # in particular follow_symlinks, thus we replace it temporarily
+    # with a wrapper that takes all the parameters so that follow_symlinks
+    # is actually passed through properly.
+ # Not nice, but better than actually rewriting shutil.chown until
+ # this python bug is fixed: https://bugs.python.org/issue18108
+ real_os_chown = os.chown
+
+ def chown(path: T.Union[int, str, 'os.PathLike[str]', bytes, 'os.PathLike[bytes]'],
+ uid: int, gid: int, *, dir_fd: T.Optional[int] = dir_fd,
+ follow_symlinks: bool = follow_symlinks) -> None:
+ """Override the default behavior of os.chown
+
+ Use a real function rather than a lambda to help mypy out. Also real
+ functions are faster.
+ """
+ real_os_chown(path, uid, gid, dir_fd=dir_fd, follow_symlinks=follow_symlinks)
+
+ try:
+ os.chown = chown
+ shutil.chown(path, user, group)
+ finally:
+ os.chown = real_os_chown
+
+
+def set_chmod(path: str, mode: int, dir_fd: T.Optional[int] = None,
+ follow_symlinks: bool = True) -> None:
+ try:
+ os.chmod(path, mode, dir_fd=dir_fd, follow_symlinks=follow_symlinks)
+ except (NotImplementedError, OSError, SystemError):
+ if not os.path.islink(path):
+ os.chmod(path, mode, dir_fd=dir_fd)
+
+
+def sanitize_permissions(path: str, umask: T.Union[str, int]) -> None:
+ # TODO: with python 3.8 or typing_extensions we could replace this with
+    # `umask: T.Union[T.Literal['preserve'], int]`, which would be more correct
+ if umask == 'preserve':
+ return
+    assert isinstance(umask, int), 'umask should only be "preserve" or an integer'
+ new_perms = 0o777 if is_executable(path, follow_symlinks=False) else 0o666
+ new_perms &= ~umask
+ try:
+ set_chmod(path, new_perms, follow_symlinks=False)
+ except PermissionError as e:
+ msg = '{!r}: Unable to set permissions {!r}: {}, ignoring...'
+ print(msg.format(path, new_perms, e.strerror))
+
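+# Example: with the common umask 0o022, a non-executable file ends up with
+# 0o666 & ~0o022 == 0o644, and an executable one with 0o777 & ~0o022 == 0o755.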
+
+def set_mode(path: str, mode: T.Optional['FileMode'], default_umask: T.Union[str, int]) -> None:
+ if mode is None or all(m is None for m in [mode.perms_s, mode.owner, mode.group]):
+ # Just sanitize permissions with the default umask
+ sanitize_permissions(path, default_umask)
+ return
+ # No chown() on Windows, and must set one of owner/group
+ if not is_windows() and (mode.owner is not None or mode.group is not None):
+ try:
+ set_chown(path, mode.owner, mode.group, follow_symlinks=False)
+ except PermissionError as e:
+ msg = '{!r}: Unable to set owner {!r} and group {!r}: {}, ignoring...'
+ print(msg.format(path, mode.owner, mode.group, e.strerror))
+ except LookupError:
+ msg = '{!r}: Non-existent owner {!r} or group {!r}: ignoring...'
+ print(msg.format(path, mode.owner, mode.group))
+ except OSError as e:
+ if e.errno == errno.EINVAL:
+ msg = '{!r}: Non-existent numeric owner {!r} or group {!r}: ignoring...'
+ print(msg.format(path, mode.owner, mode.group))
+ else:
+ raise
+ # Must set permissions *after* setting owner/group otherwise the
+ # setuid/setgid bits will get wiped by chmod
+ # NOTE: On Windows you can set read/write perms; the rest are ignored
+ if mode.perms_s is not None:
+ try:
+ set_chmod(path, mode.perms, follow_symlinks=False)
+ except PermissionError as e:
+ msg = '{!r}: Unable to set permissions {!r}: {}, ignoring...'
+ print(msg.format(path, mode.perms_s, e.strerror))
+ else:
+ sanitize_permissions(path, default_umask)
+
+
+def restore_selinux_contexts() -> None:
+ '''
+ Restores the SELinux context for files in @selinux_updates
+
+ If $DESTDIR is set, do not warn if the call fails.
+ '''
+ try:
+ subprocess.check_call(['selinuxenabled'])
+ except (FileNotFoundError, NotADirectoryError, PermissionError, subprocess.CalledProcessError):
+ # If we don't have selinux or selinuxenabled returned 1, failure
+ # is ignored quietly.
+ return
+
+ if not shutil.which('restorecon'):
+ # If we don't have restorecon, failure is ignored quietly.
+ return
+
+ if not selinux_updates:
+ # If the list of files is empty, do not try to call restorecon.
+ return
+
+ with subprocess.Popen(['restorecon', '-F', '-f-', '-0'],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
+ out, err = proc.communicate(input=b'\0'.join(os.fsencode(f) for f in selinux_updates) + b'\0')
+ if proc.returncode != 0 and not os.environ.get('DESTDIR'):
+ print('Failed to restore SELinux context of installed files...',
+ 'Standard output:', out.decode(),
+ 'Standard error:', err.decode(), sep='\n')
+
+
+def get_destdir_path(destdir: str, fullprefix: str, path: str) -> str:
+ if os.path.isabs(path):
+ output = destdir_join(destdir, path)
+ else:
+ output = os.path.join(fullprefix, path)
+ return output
+
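+# Example (hypothetical paths): with destdir='/staging' and fullprefix='/staging/usr':
+#   get_destdir_path('/staging', '/staging/usr', '/usr/bin') -> '/staging/usr/bin'
+#   get_destdir_path('/staging', '/staging/usr', 'bin')      -> '/staging/usr/bin'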
+
+def check_for_stampfile(fname: str) -> str:
+ '''Some languages e.g. Rust have output files
+ whose names are not known at configure time.
+ Check if this is the case and return the real
+ file instead.'''
+ if fname.endswith('.so') or fname.endswith('.dll'):
+ if os.stat(fname).st_size == 0:
+ (base, suffix) = os.path.splitext(fname)
+ files = glob(base + '-*' + suffix)
+ if len(files) > 1:
+ print("Stale dynamic library files in build dir. Can't install.")
+ sys.exit(1)
+ if len(files) == 1:
+ return files[0]
+ elif fname.endswith('.a') or fname.endswith('.lib'):
+ if os.stat(fname).st_size == 0:
+ (base, suffix) = os.path.splitext(fname)
+ files = glob(base + '-*' + '.rlib')
+ if len(files) > 1:
+ print("Stale static library files in build dir. Can't install.")
+ sys.exit(1)
+ if len(files) == 1:
+ return files[0]
+ return fname
+
+
+class Installer:
+
+ def __init__(self, options: 'ArgumentType', lf: T.TextIO):
+ self.did_install_something = False
+ self.options = options
+ self.lf = lf
+ self.preserved_file_count = 0
+ self.dry_run = options.dry_run
+ # [''] means skip none,
+ # ['*'] means skip all,
+ # ['sub1', ...] means skip only those.
+ self.skip_subprojects = [i.strip() for i in options.skip_subprojects.split(',')]
+
+ def remove(self, *args: T.Any, **kwargs: T.Any) -> None:
+ if not self.dry_run:
+ os.remove(*args, **kwargs)
+
+ def symlink(self, *args: T.Any, **kwargs: T.Any) -> None:
+ if not self.dry_run:
+ os.symlink(*args, **kwargs)
+
+ def makedirs(self, *args: T.Any, **kwargs: T.Any) -> None:
+ if not self.dry_run:
+ os.makedirs(*args, **kwargs)
+
+ def copy(self, *args: T.Any, **kwargs: T.Any) -> None:
+ if not self.dry_run:
+ shutil.copy(*args, **kwargs)
+
+ def copy2(self, *args: T.Any, **kwargs: T.Any) -> None:
+ if not self.dry_run:
+ shutil.copy2(*args, **kwargs)
+
+ def copyfile(self, *args: T.Any, **kwargs: T.Any) -> None:
+ if not self.dry_run:
+ shutil.copyfile(*args, **kwargs)
+
+ def copystat(self, *args: T.Any, **kwargs: T.Any) -> None:
+ if not self.dry_run:
+ shutil.copystat(*args, **kwargs)
+
+ def fix_rpath(self, *args: T.Any, **kwargs: T.Any) -> None:
+ if not self.dry_run:
+ depfixer.fix_rpath(*args, **kwargs)
+
+ def set_chown(self, *args: T.Any, **kwargs: T.Any) -> None:
+ if not self.dry_run:
+ set_chown(*args, **kwargs)
+
+ def set_chmod(self, *args: T.Any, **kwargs: T.Any) -> None:
+ if not self.dry_run:
+ set_chmod(*args, **kwargs)
+
+ def sanitize_permissions(self, *args: T.Any, **kwargs: T.Any) -> None:
+ if not self.dry_run:
+ sanitize_permissions(*args, **kwargs)
+
+ def set_mode(self, *args: T.Any, **kwargs: T.Any) -> None:
+ if not self.dry_run:
+ set_mode(*args, **kwargs)
+
+ def restore_selinux_contexts(self) -> None:
+ if not self.dry_run:
+ restore_selinux_contexts()
+
+ def Popen_safe(self, *args: T.Any, **kwargs: T.Any) -> T.Tuple[int, str, str]:
+ if not self.dry_run:
+ p, o, e = Popen_safe(*args, **kwargs)
+ return p.returncode, o, e
+ return 0, '', ''
+
+ def run_exe(self, *args: T.Any, **kwargs: T.Any) -> int:
+ if not self.dry_run:
+ return run_exe(*args, **kwargs)
+ return 0
+
+ def install_subproject(self, subproject: str) -> bool:
+ if subproject and (subproject in self.skip_subprojects or '*' in self.skip_subprojects):
+ return False
+ return True
+
+ def log(self, msg: str) -> None:
+ if not self.options.quiet:
+ print(msg)
+
+ def should_preserve_existing_file(self, from_file: str, to_file: str) -> bool:
+ if not self.options.only_changed:
+ return False
+        # Always replace dangling symlinks
+ if os.path.islink(from_file) and not os.path.isfile(from_file):
+ return False
+ from_time = os.stat(from_file).st_mtime
+ to_time = os.stat(to_file).st_mtime
+ return from_time <= to_time
+
+ def do_copyfile(self, from_file: str, to_file: str,
+ makedirs: T.Optional[T.Tuple[T.Any, str]] = None) -> bool:
+ outdir = os.path.split(to_file)[0]
+ if not os.path.isfile(from_file) and not os.path.islink(from_file):
+            raise RuntimeError('Tried to install something that isn\'t a file: '
+                               '{!r}'.format(from_file))
+ # copyfile fails if the target file already exists, so remove it to
+ # allow overwriting a previous install. If the target is not a file, we
+ # want to give a readable error.
+ if os.path.exists(to_file):
+ if not os.path.isfile(to_file):
+ raise RuntimeError('Destination {!r} already exists and is not '
+ 'a file'.format(to_file))
+ if self.should_preserve_existing_file(from_file, to_file):
+ append_to_log(self.lf, f'# Preserving old file {to_file}\n')
+ self.preserved_file_count += 1
+ return False
+ self.remove(to_file)
+ elif makedirs:
+ # Unpack tuple
+ dirmaker, outdir = makedirs
+ # Create dirs if needed
+ dirmaker.makedirs(outdir, exist_ok=True)
+ self.log(f'Installing {from_file} to {outdir}')
+ if os.path.islink(from_file):
+ if not os.path.exists(from_file):
+ # Dangling symlink. Replicate as is.
+ self.copy(from_file, outdir, follow_symlinks=False)
+ else:
+ # Remove this entire branch when changing the behaviour to duplicate
+ # symlinks rather than copying what they point to.
+ print(symlink_warning)
+ self.copy2(from_file, to_file)
+ else:
+ self.copy2(from_file, to_file)
+ selinux_updates.append(to_file)
+ append_to_log(self.lf, to_file)
+ return True
+
+ def do_copydir(self, data: InstallData, src_dir: str, dst_dir: str,
+ exclude: T.Optional[T.Tuple[T.Set[str], T.Set[str]]],
+ install_mode: 'FileMode', dm: DirMaker) -> None:
+ '''
+ Copies the contents of directory @src_dir into @dst_dir.
+
+ For directory
+ /foo/
+ bar/
+ excluded
+ foobar
+ file
+ do_copydir(..., '/foo', '/dst/dir', {'bar/excluded'}) creates
+ /dst/
+ dir/
+ bar/
+ foobar
+ file
+
+ Args:
+ src_dir: str, absolute path to the source directory
+ dst_dir: str, absolute path to the destination directory
+ exclude: (set(str), set(str)), tuple of (exclude_files, exclude_dirs),
+ each element of the set is a path relative to src_dir.
+ '''
+ if not os.path.isabs(src_dir):
+ raise ValueError(f'src_dir must be absolute, got {src_dir}')
+ if not os.path.isabs(dst_dir):
+ raise ValueError(f'dst_dir must be absolute, got {dst_dir}')
+ if exclude is not None:
+ exclude_files, exclude_dirs = exclude
+ else:
+ exclude_files = exclude_dirs = set()
+ for root, dirs, files in os.walk(src_dir):
+ assert os.path.isabs(root)
+ for d in dirs[:]:
+ abs_src = os.path.join(root, d)
+ filepart = os.path.relpath(abs_src, start=src_dir)
+ abs_dst = os.path.join(dst_dir, filepart)
+ # Remove these so they aren't visited by os.walk at all.
+ if filepart in exclude_dirs:
+ dirs.remove(d)
+ continue
+ if os.path.isdir(abs_dst):
+ continue
+ if os.path.exists(abs_dst):
+ print(f'Tried to copy directory {abs_dst} but a file of that name already exists.')
+ sys.exit(1)
+ dm.makedirs(abs_dst)
+ self.copystat(abs_src, abs_dst)
+ self.sanitize_permissions(abs_dst, data.install_umask)
+ for f in files:
+ abs_src = os.path.join(root, f)
+ filepart = os.path.relpath(abs_src, start=src_dir)
+ if filepart in exclude_files:
+ continue
+ abs_dst = os.path.join(dst_dir, filepart)
+ if os.path.isdir(abs_dst):
+ print(f'Tried to copy file {abs_dst} but a directory of that name already exists.')
+ sys.exit(1)
+ parent_dir = os.path.dirname(abs_dst)
+ if not os.path.isdir(parent_dir):
+ dm.makedirs(parent_dir)
+ self.copystat(os.path.dirname(abs_src), parent_dir)
+ # FIXME: what about symlinks?
+ self.do_copyfile(abs_src, abs_dst)
+ self.set_mode(abs_dst, install_mode, data.install_umask)
+
+ @staticmethod
+ def check_installdata(obj: InstallData) -> InstallData:
+ if not isinstance(obj, InstallData) or not hasattr(obj, 'version'):
+ raise MesonVersionMismatchException('<unknown>', coredata_version)
+ if major_versions_differ(obj.version, coredata_version):
+ raise MesonVersionMismatchException(obj.version, coredata_version)
+ return obj
+
+ def do_install(self, datafilename: str) -> None:
+ with open(datafilename, 'rb') as ifile:
+ d = self.check_installdata(pickle.load(ifile))
+
+ # Override in the env because some scripts could be relying on it.
+ if self.options.destdir is not None:
+ os.environ['DESTDIR'] = self.options.destdir
+
+ destdir = os.environ.get('DESTDIR', '')
+ fullprefix = destdir_join(destdir, d.prefix)
+
+ if d.install_umask != 'preserve':
+ assert isinstance(d.install_umask, int)
+ os.umask(d.install_umask)
+
+ self.did_install_something = False
+ try:
+ with DirMaker(self.lf, self.makedirs) as dm:
+ self.install_subdirs(d, dm, destdir, fullprefix) # Must be first, because it needs to delete the old subtree.
+ self.install_targets(d, dm, destdir, fullprefix)
+ self.install_headers(d, dm, destdir, fullprefix)
+ self.install_man(d, dm, destdir, fullprefix)
+ self.install_data(d, dm, destdir, fullprefix)
+ self.restore_selinux_contexts()
+ self.run_install_script(d, destdir, fullprefix)
+ if not self.did_install_something:
+ self.log('Nothing to install.')
+ if not self.options.quiet and self.preserved_file_count > 0:
+ self.log('Preserved {} unchanged files, see {} for the full list'
+ .format(self.preserved_file_count, os.path.normpath(self.lf.name)))
+ except PermissionError:
+ if shutil.which('pkexec') is not None and 'PKEXEC_UID' not in os.environ and destdir == '':
+ print('Installation failed due to insufficient permissions.')
+ print('Attempting to use polkit to gain elevated privileges...')
+ os.execlp('pkexec', 'pkexec', sys.executable, main_file, *sys.argv[1:],
+ '-C', os.getcwd())
+ else:
+ raise
+
+ def install_subdirs(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+ for i in d.install_subdirs:
+ if not self.install_subproject(i.subproject):
+ continue
+ self.did_install_something = True
+ full_dst_dir = get_destdir_path(destdir, fullprefix, i.install_path)
+ self.log(f'Installing subdir {i.path} to {full_dst_dir}')
+ dm.makedirs(full_dst_dir, exist_ok=True)
+ self.do_copydir(d, i.path, full_dst_dir, i.exclude, i.install_mode, dm)
+
+ def install_data(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+ for i in d.data:
+ if not self.install_subproject(i.subproject):
+ continue
+ fullfilename = i.path
+ outfilename = get_destdir_path(destdir, fullprefix, i.install_path)
+ outdir = os.path.dirname(outfilename)
+ if self.do_copyfile(fullfilename, outfilename, makedirs=(dm, outdir)):
+ self.did_install_something = True
+ self.set_mode(outfilename, i.install_mode, d.install_umask)
+
+ def install_man(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+ for m in d.man:
+ if not self.install_subproject(m.subproject):
+ continue
+ full_source_filename = m.path
+ outfilename = get_destdir_path(destdir, fullprefix, m.install_path)
+ outdir = os.path.dirname(outfilename)
+ if self.do_copyfile(full_source_filename, outfilename, makedirs=(dm, outdir)):
+ self.did_install_something = True
+ self.set_mode(outfilename, m.install_mode, d.install_umask)
+
+ def install_headers(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+ for t in d.headers:
+ if not self.install_subproject(t.subproject):
+ continue
+ fullfilename = t.path
+ fname = os.path.basename(fullfilename)
+ outdir = get_destdir_path(destdir, fullprefix, t.install_path)
+ outfilename = os.path.join(outdir, fname)
+ if self.do_copyfile(fullfilename, outfilename, makedirs=(dm, outdir)):
+ self.did_install_something = True
+ self.set_mode(outfilename, t.install_mode, d.install_umask)
+
+ def run_install_script(self, d: InstallData, destdir: str, fullprefix: str) -> None:
+ env = {'MESON_SOURCE_ROOT': d.source_dir,
+ 'MESON_BUILD_ROOT': d.build_dir,
+ 'MESON_INSTALL_PREFIX': d.prefix,
+ 'MESON_INSTALL_DESTDIR_PREFIX': fullprefix,
+ 'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in d.mesonintrospect]),
+ }
+ if self.options.quiet:
+ env['MESON_INSTALL_QUIET'] = '1'
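+        # Illustrative note: a custom install script can consume these, e.g.
+        #   prefix = os.environ['MESON_INSTALL_DESTDIR_PREFIX']
+        # to stage its own files under the (possibly DESTDIR-prefixed) prefix.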
+
+ for i in d.install_scripts:
+ if not self.install_subproject(i.subproject):
+ continue
+ name = ' '.join(i.cmd_args)
+ if i.skip_if_destdir and destdir:
+                self.log(f'Skipping custom install script {name!r} because DESTDIR is set')
+ continue
+ self.did_install_something = True # Custom script must report itself if it does nothing.
+ self.log(f'Running custom install script {name!r}')
+ try:
+ rc = self.run_exe(i, env)
+ except OSError:
+ print(f'FAILED: install script \'{name}\' could not be run, stopped')
+ # POSIX shells return 127 when a command could not be found
+ sys.exit(127)
+ if rc != 0:
+ print(f'FAILED: install script \'{name}\' exit code {rc}, stopped')
+ sys.exit(rc)
+
+ def install_targets(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+ for t in d.targets:
+ if not self.install_subproject(t.subproject):
+ continue
+ if not os.path.exists(t.fname):
+ # For example, import libraries of shared modules are optional
+ if t.optional:
+ self.log(f'File {t.fname!r} not found, skipping')
+ continue
+ else:
+ raise RuntimeError(f'File {t.fname!r} could not be found')
+ file_copied = False # not set when a directory is copied
+ fname = check_for_stampfile(t.fname)
+ outdir = get_destdir_path(destdir, fullprefix, t.outdir)
+ outname = os.path.join(outdir, os.path.basename(fname))
+ final_path = os.path.join(d.prefix, t.outdir, os.path.basename(fname))
+ aliases = t.aliases
+ should_strip = t.strip
+ install_rpath = t.install_rpath
+ install_name_mappings = t.install_name_mappings
+ install_mode = t.install_mode
+ if not os.path.exists(fname):
+ raise RuntimeError(f'File {fname!r} could not be found')
+ elif os.path.isfile(fname):
+ file_copied = self.do_copyfile(fname, outname, makedirs=(dm, outdir))
+ self.set_mode(outname, install_mode, d.install_umask)
+ if should_strip and d.strip_bin is not None:
+ if fname.endswith('.jar'):
+ self.log('Not stripping jar target: {}'.format(os.path.basename(fname)))
+ continue
+ self.log('Stripping target {!r} using {}.'.format(fname, d.strip_bin[0]))
+ returncode, stdo, stde = self.Popen_safe(d.strip_bin + [outname])
+ if returncode != 0:
+ print('Could not strip file.\n')
+ print(f'Stdout:\n{stdo}\n')
+ print(f'Stderr:\n{stde}\n')
+ sys.exit(1)
+ if fname.endswith('.js'):
+ # Emscripten outputs js files and optionally a wasm file.
+ # If one was generated, install it as well.
+ wasm_source = os.path.splitext(fname)[0] + '.wasm'
+ if os.path.exists(wasm_source):
+ wasm_output = os.path.splitext(outname)[0] + '.wasm'
+ file_copied = self.do_copyfile(wasm_source, wasm_output)
+ elif os.path.isdir(fname):
+ fname = os.path.join(d.build_dir, fname.rstrip('/'))
+ outname = os.path.join(outdir, os.path.basename(fname))
+ dm.makedirs(outdir, exist_ok=True)
+ self.do_copydir(d, fname, outname, None, install_mode, dm)
+ else:
+ raise RuntimeError(f'Unknown file type for {fname!r}')
+ printed_symlink_error = False
+ for alias, to in aliases.items():
+ try:
+ symlinkfilename = os.path.join(outdir, alias)
+ try:
+ self.remove(symlinkfilename)
+ except FileNotFoundError:
+ pass
+ self.symlink(to, symlinkfilename)
+ append_to_log(self.lf, symlinkfilename)
+ except (NotImplementedError, OSError):
+ if not printed_symlink_error:
+ print("Symlink creation does not work on this platform. "
+ "Skipping all symlinking.")
+ printed_symlink_error = True
+ if file_copied:
+ self.did_install_something = True
+ try:
+ self.fix_rpath(outname, t.rpath_dirs_to_remove, install_rpath, final_path,
+ install_name_mappings, verbose=False)
+ except SystemExit as e:
+ if isinstance(e.code, int) and e.code == 0:
+ pass
+ else:
+ raise
+
+
+def rebuild_all(wd: str) -> bool:
+ if not (Path(wd) / 'build.ninja').is_file():
+        print('Only the ninja backend supports rebuilding the project before installation.')
+ return True
+
+ ninja = environment.detect_ninja()
+ if not ninja:
+ print("Can't find ninja, can't rebuild test.")
+ return False
+
+ ret = subprocess.run(ninja + ['-C', wd]).returncode
+ if ret != 0:
+ print(f'Could not rebuild {wd}')
+ return False
+
+ return True
+
+
+def run(opts: 'ArgumentType') -> int:
+ datafilename = 'meson-private/install.dat'
+ private_dir = os.path.dirname(datafilename)
+ log_dir = os.path.join(private_dir, '../meson-logs')
+ if not os.path.exists(os.path.join(opts.wd, datafilename)):
+ sys.exit('Install data not found. Run this command in build directory root.')
+ if not opts.no_rebuild:
+ if not rebuild_all(opts.wd):
+ sys.exit(-1)
+ os.chdir(opts.wd)
+ with open(os.path.join(log_dir, 'install-log.txt'), 'w', encoding='utf-8') as lf:
+ installer = Installer(opts, lf)
+ append_to_log(lf, '# List of files installed by Meson')
+ append_to_log(lf, '# Does not contain files installed by custom scripts.')
+ if opts.profile:
+ import cProfile as profile
+ fname = os.path.join(private_dir, 'profile-installer.log')
+ profile.runctx('installer.do_install(datafilename)', globals(), locals(), filename=fname)
+ else:
+ installer.do_install(datafilename)
+ return 0
diff --git a/meson/mesonbuild/mintro.py b/meson/mesonbuild/mintro.py
new file mode 100644
index 000000000..a79219b3d
--- /dev/null
+++ b/meson/mesonbuild/mintro.py
@@ -0,0 +1,543 @@
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This is a helper script for IDE developers. It allows you to
+extract information such as list of targets, files, compiler flags,
+tests and so on. All output is in JSON for simple parsing.
+
+Currently only works for the Ninja backend. Others use generated
+project files and don't need this info."""
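+
+# An illustrative invocation from a shell (the flags correspond to
+# add_arguments() further down; the build directory path is assumed):
+#   meson introspect --targets --indent <builddir>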
+
+import collections
+import json
+from . import build, coredata as cdata
+from . import mesonlib
+from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter
+from . import mlog
+from .backend import backends
+from .mparser import BaseNode, FunctionNode, ArrayNode, ArgumentNode, StringNode
+from .interpreter import Interpreter
+from pathlib import Path, PurePath
+import typing as T
+import os
+import argparse
+
+from .mesonlib import OptionKey
+
+def get_meson_info_file(info_dir: str) -> str:
+ return os.path.join(info_dir, 'meson-info.json')
+
+def get_meson_introspection_version() -> str:
+ return '1.0.0'
+
+def get_meson_introspection_required_version() -> T.List[str]:
+ return ['>=1.0', '<2.0']
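+
+# Illustrative check: mesonlib.version_compare('1.0.0', '>=1.0') and
+# mesonlib.version_compare('1.0.0', '<2.0') both hold, so the version above
+# satisfies the required range (this is exactly what run() verifies below).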
+
+class IntroCommand:
+ def __init__(self,
+ desc: str,
+ func: T.Optional[T.Callable[[], T.Union[dict, list]]] = None,
+ no_bd: T.Optional[T.Callable[[IntrospectionInterpreter], T.Union[dict, list]]] = None) -> None:
+ self.desc = desc + '.'
+ self.func = func
+ self.no_bd = no_bd
+
+def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
+ builddata: T.Optional[build.Build] = None,
+ backend: T.Optional[backends.Backend] = None,
+ sourcedir: T.Optional[str] = None) -> 'T.Mapping[str, IntroCommand]':
+ if backend and builddata:
+ benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks())
+ testdata = backend.create_test_serialisation(builddata.get_tests())
+ installdata = backend.create_install_data()
+ interpreter = backend.interpreter
+ else:
+ benchmarkdata = testdata = installdata = None
+
+ # Enforce key order for argparse
+ return collections.OrderedDict([
+ ('ast', IntroCommand('Dump the AST of the meson file', no_bd=dump_ast)),
+ ('benchmarks', IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata))),
+ ('buildoptions', IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source)),
+ ('buildsystem_files', IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter))),
+ ('dependencies', IntroCommand('List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source)),
+ ('scan_dependencies', IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source)),
+ ('installed', IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata))),
+ ('projectinfo', IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source)),
+ ('targets', IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source)),
+ ('tests', IntroCommand('List all unit tests', func=lambda: list_tests(testdata))),
+ ])
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+ intro_types = get_meson_introspection_types()
+ for key, val in intro_types.items():
+ flag = '--' + key.replace('_', '-')
+ parser.add_argument(flag, action='store_true', dest=key, default=False, help=val.desc)
+
+ parser.add_argument('--backend', choices=sorted(cdata.backendlist), dest='backend', default='ninja',
+ help='The backend to use for the --buildoptions introspection.')
+ parser.add_argument('-a', '--all', action='store_true', dest='all', default=False,
+ help='Print all available information.')
+ parser.add_argument('-i', '--indent', action='store_true', dest='indent', default=False,
+ help='Enable pretty printed JSON.')
+ parser.add_argument('-f', '--force-object-output', action='store_true', dest='force_dict', default=False,
+ help='Always use the new JSON format for multiple entries (even for 0 and 1 introspection commands)')
+ parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
+
+def dump_ast(intr: IntrospectionInterpreter) -> T.Dict[str, T.Any]:
+ printer = AstJSONPrinter()
+ intr.ast.accept(printer)
+ return printer.result
+
+def list_installed(installdata: backends.InstallData) -> T.Dict[str, str]:
+ res = {}
+ if installdata is not None:
+ for t in installdata.targets:
+ res[os.path.join(installdata.build_dir, t.fname)] = \
+ os.path.join(installdata.prefix, t.outdir, os.path.basename(t.fname))
+ for alias in t.aliases.keys():
+ res[os.path.join(installdata.build_dir, alias)] = \
+ os.path.join(installdata.prefix, t.outdir, os.path.basename(alias))
+ for i in installdata.data:
+ res[i.path] = os.path.join(installdata.prefix, i.install_path)
+ for i in installdata.headers:
+ res[i.path] = os.path.join(installdata.prefix, i.install_path, os.path.basename(i.path))
+ for i in installdata.man:
+ res[i.path] = os.path.join(installdata.prefix, i.install_path)
+ for i in installdata.install_subdirs:
+ res[i.path] = os.path.join(installdata.prefix, i.install_path)
+ return res
+
+def get_target_dir(coredata: cdata.CoreData, subdir: str) -> str:
+ if coredata.get_option(OptionKey('layout')) == 'flat':
+ return 'meson-out'
+ else:
+ return subdir
+
+def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
+ tlist = [] # type: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]
+ root_dir = Path(intr.source_root)
+
+ def nodes_to_paths(node_list: T.List[BaseNode]) -> T.List[Path]:
+ res = [] # type: T.List[Path]
+ for n in node_list:
+ args = [] # type: T.List[BaseNode]
+ if isinstance(n, FunctionNode):
+ args = list(n.args.arguments)
+ if n.func_name in build_target_functions:
+ args.pop(0)
+ elif isinstance(n, ArrayNode):
+ args = n.args.arguments
+ elif isinstance(n, ArgumentNode):
+ args = n.arguments
+ for j in args:
+ if isinstance(j, StringNode):
+ assert isinstance(j.value, str)
+ res += [Path(j.value)]
+ elif isinstance(j, str):
+ res += [Path(j)]
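+        # Note: 'i' is the enclosing loop variable below; this closure is only
+        # called from inside that loop, so i['subdir'] is always bound.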
+ res = [root_dir / i['subdir'] / x for x in res]
+ res = [x.resolve() for x in res]
+ return res
+
+ for i in intr.targets:
+ sources = nodes_to_paths(i['sources'])
+ extra_f = nodes_to_paths(i['extra_files'])
+ outdir = get_target_dir(intr.coredata, i['subdir'])
+
+ tlist += [{
+ 'name': i['name'],
+ 'id': i['id'],
+ 'type': i['type'],
+ 'defined_in': i['defined_in'],
+ 'filename': [os.path.join(outdir, x) for x in i['outputs']],
+ 'build_by_default': i['build_by_default'],
+ 'target_sources': [{
+ 'language': 'unknown',
+ 'compiler': [],
+ 'parameters': [],
+ 'sources': [str(x) for x in sources],
+ 'generated_sources': []
+ }],
+ 'extra_files': [str(x) for x in extra_f],
+ 'subproject': None, # Subprojects are not supported
+ 'installed': i['installed']
+ }]
+
+ return tlist
+
+def list_targets(builddata: build.Build, installdata: backends.InstallData, backend: backends.Backend) -> T.List[T.Any]:
+ tlist = [] # type: T.List[T.Any]
+ build_dir = builddata.environment.get_build_dir()
+ src_dir = builddata.environment.get_source_dir()
+
+ # Fast lookup table for installation files
+ install_lookuptable = {}
+ for i in installdata.targets:
+ out = [os.path.join(installdata.prefix, i.outdir, os.path.basename(i.fname))]
+ out += [os.path.join(installdata.prefix, i.outdir, os.path.basename(x)) for x in i.aliases]
+ install_lookuptable[os.path.basename(i.fname)] = [str(PurePath(x)) for x in out]
+
+ for (idname, target) in builddata.get_targets().items():
+ if not isinstance(target, build.Target):
+ raise RuntimeError('The target object in `builddata.get_targets()` is not of type `build.Target`. Please file a bug with this error message.')
+
+ outdir = get_target_dir(builddata.environment.coredata, target.subdir)
+ t = {
+ 'name': target.get_basename(),
+ 'id': idname,
+ 'type': target.get_typename(),
+ 'defined_in': os.path.normpath(os.path.join(src_dir, target.subdir, 'meson.build')),
+ 'filename': [os.path.join(build_dir, outdir, x) for x in target.get_outputs()],
+ 'build_by_default': target.build_by_default,
+ 'target_sources': backend.get_introspection_data(idname, target),
+ 'extra_files': [os.path.normpath(os.path.join(src_dir, x.subdir, x.fname)) for x in target.extra_files],
+ 'subproject': target.subproject or None
+ }
+
+ if installdata and target.should_install():
+ t['installed'] = True
+ ifn = [install_lookuptable.get(x, [None]) for x in target.get_outputs()]
+ t['install_filename'] = [x for sublist in ifn for x in sublist] # flatten the list
+ else:
+ t['installed'] = False
+ tlist.append(t)
+ return tlist
+
+def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
+ subprojects = [i['name'] for i in intr.project_data['subprojects']]
+ return list_buildoptions(intr.coredata, subprojects)
+
+def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[str]] = None) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
+ optlist = [] # type: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]
+ subprojects = subprojects or []
+
+ dir_option_names = set(cdata.BUILTIN_DIR_OPTIONS)
+ test_option_names = {OptionKey('errorlogs'),
+ OptionKey('stdsplit')}
+
+ dir_options: 'cdata.KeyedOptionDictType' = {}
+ test_options: 'cdata.KeyedOptionDictType' = {}
+ core_options: 'cdata.KeyedOptionDictType' = {}
+ for k, v in coredata.options.items():
+ if k in dir_option_names:
+ dir_options[k] = v
+ elif k in test_option_names:
+ test_options[k] = v
+ elif k.is_builtin():
+ core_options[k] = v
+ if not v.yielding:
+ for s in subprojects:
+ core_options[k.evolve(subproject=s)] = v
+
+ def add_keys(options: 'cdata.KeyedOptionDictType', section: str) -> None:
+ for key, opt in sorted(options.items()):
+ optdict = {'name': str(key), 'value': opt.value, 'section': section,
+ 'machine': key.machine.get_lower_case_name() if coredata.is_per_machine_option(key) else 'any'}
+ if isinstance(opt, cdata.UserStringOption):
+ typestr = 'string'
+ elif isinstance(opt, cdata.UserBooleanOption):
+ typestr = 'boolean'
+ elif isinstance(opt, cdata.UserComboOption):
+ optdict['choices'] = opt.choices
+ typestr = 'combo'
+ elif isinstance(opt, cdata.UserIntegerOption):
+ typestr = 'integer'
+ elif isinstance(opt, cdata.UserArrayOption):
+ typestr = 'array'
+ if opt.choices:
+ optdict['choices'] = opt.choices
+ else:
+ raise RuntimeError("Unknown option type")
+ optdict['type'] = typestr
+ optdict['description'] = opt.description
+ optlist.append(optdict)
+
+ add_keys(core_options, 'core')
+ add_keys({k: v for k, v in coredata.options.items() if k.is_backend()}, 'backend')
+ add_keys({k: v for k, v in coredata.options.items() if k.is_base()}, 'base')
+ add_keys(
+ {k: v for k, v in sorted(coredata.options.items(), key=lambda i: i[0].machine) if k.is_compiler()},
+ 'compiler',
+ )
+ add_keys(dir_options, 'directory')
+ add_keys({k: v for k, v in coredata.options.items() if k.is_project()}, 'user')
+ add_keys(test_options, 'test')
+ return optlist
+
+def find_buildsystem_files_list(src_dir: str) -> T.List[str]:
+ # I feel dirty about this. But only slightly.
+ filelist = [] # type: T.List[str]
+ for root, _, files in os.walk(src_dir):
+ for f in files:
+ if f == 'meson.build' or f == 'meson_options.txt':
+ filelist.append(os.path.relpath(os.path.join(root, f), src_dir))
+ return filelist
+
+def list_buildsystem_files(builddata: build.Build, interpreter: Interpreter) -> T.List[str]:
+ src_dir = builddata.environment.get_source_dir()
+ filelist = interpreter.get_build_def_files() # type: T.List[str]
+ filelist = [PurePath(src_dir, x).as_posix() for x in filelist]
+ return filelist
+
+def list_deps_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool]]]:
+ result = [] # type: T.List[T.Dict[str, T.Union[str, bool]]]
+ for i in intr.dependencies:
+ keys = [
+ 'name',
+ 'required',
+ 'version',
+ 'has_fallback',
+ 'conditional',
+ ]
+ result += [{k: v for k, v in i.items() if k in keys}]
+ return result
+
+def list_deps(coredata: cdata.CoreData) -> T.List[T.Dict[str, T.Union[str, T.List[str]]]]:
+ result = [] # type: T.List[T.Dict[str, T.Union[str, T.List[str]]]]
+ for d in coredata.deps.host.values():
+ if d.found():
+ result += [{'name': d.name,
+ 'version': d.get_version(),
+ 'compile_args': d.get_compile_args(),
+ 'link_args': d.get_link_args()}]
+ return result
+
+def get_test_list(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+ result = [] # type: T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]
+ for t in testdata:
+ to = {} # type: T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]
+ if isinstance(t.fname, str):
+ fname = [t.fname]
+ else:
+ fname = t.fname
+ to['cmd'] = fname + t.cmd_args
+ if isinstance(t.env, build.EnvironmentVariables):
+ to['env'] = t.env.get_env({})
+ else:
+ to['env'] = t.env
+ to['name'] = t.name
+ to['workdir'] = t.workdir
+ to['timeout'] = t.timeout
+ to['suite'] = t.suite
+ to['is_parallel'] = t.is_parallel
+ to['priority'] = t.priority
+ to['protocol'] = str(t.protocol)
+ to['depends'] = t.depends
+ result.append(to)
+ return result
+
+def list_tests(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+ return get_test_list(testdata)
+
+def list_benchmarks(benchdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+ return get_test_list(benchdata)
+
+def list_projinfo(builddata: build.Build) -> T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]:
+ result = {'version': builddata.project_version,
+ 'descriptive_name': builddata.project_name,
+ 'subproject_dir': builddata.subproject_dir} # type: T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]
+ subprojects = []
+ for k, v in builddata.subprojects.items():
+ c = {'name': k,
+ 'version': v,
+ 'descriptive_name': builddata.projects.get(k)} # type: T.Dict[str, str]
+ subprojects.append(c)
+ result['subprojects'] = subprojects
+ return result
+
+def list_projinfo_from_source(intr: IntrospectionInterpreter) -> T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]:
+ sourcedir = intr.source_root
+ files = find_buildsystem_files_list(sourcedir)
+ files = [os.path.normpath(x) for x in files]
+
+ for i in intr.project_data['subprojects']:
+ basedir = os.path.join(intr.subproject_dir, i['name'])
+ i['buildsystem_files'] = [x for x in files if x.startswith(basedir)]
+ files = [x for x in files if not x.startswith(basedir)]
+
+ intr.project_data['buildsystem_files'] = files
+ intr.project_data['subproject_dir'] = intr.subproject_dir
+ return intr.project_data
+
+def print_results(options: argparse.Namespace, results: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], indent: int) -> int:
+ if not results and not options.force_dict:
+ print('No command specified')
+ return 1
+ elif len(results) == 1 and not options.force_dict:
+        # Make sure to keep the existing output format for a single option
+ print(json.dumps(results[0][1], indent=indent))
+ else:
+ out = {}
+ for i in results:
+ out[i[0]] = i[1]
+ print(json.dumps(out, indent=indent))
+ return 0
+
+def get_infodir(builddir: T.Optional[str] = None) -> str:
+ infodir = 'meson-info'
+ if builddir is not None:
+ infodir = os.path.join(builddir, infodir)
+ return infodir
+
+def get_info_file(infodir: str, kind: T.Optional[str] = None) -> str:
+ return os.path.join(infodir,
+ 'meson-info.json' if not kind else f'intro-{kind}.json')
+
+def load_info_file(infodir: str, kind: T.Optional[str] = None) -> T.Any:
+ with open(get_info_file(infodir, kind), encoding='utf-8') as fp:
+ return json.load(fp)
+
+def run(options: argparse.Namespace) -> int:
+ datadir = 'meson-private'
+ infodir = get_infodir(options.builddir)
+ if options.builddir is not None:
+ datadir = os.path.join(options.builddir, datadir)
+ indent = 4 if options.indent else None
+ results = [] # type: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]]
+    # builddir may actually be a path to a meson.build file; strip the trailing
+    # 'meson.build' (11 characters) to recover the source directory.
+    sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11]
+ intro_types = get_meson_introspection_types(sourcedir=sourcedir)
+
+ if 'meson.build' in [os.path.basename(options.builddir), options.builddir]:
+ # Make sure that log entries in other parts of meson don't interfere with the JSON output
+ mlog.disable()
+ backend = backends.get_backend_from_name(options.backend)
+ assert backend is not None
+ intr = IntrospectionInterpreter(sourcedir, '', backend.name, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
+ intr.analyze()
+ # Re-enable logging just in case
+ mlog.enable()
+ for key, val in intro_types.items():
+ if (not options.all and not getattr(options, key, False)) or not val.no_bd:
+ continue
+ results += [(key, val.no_bd(intr))]
+ return print_results(options, results, indent)
+
+ try:
+ raw = load_info_file(infodir)
+ intro_vers = raw.get('introspection', {}).get('version', {}).get('full', '0.0.0')
+ except FileNotFoundError:
+ if not os.path.isdir(datadir) or not os.path.isdir(infodir):
+ print('Current directory is not a meson build directory.\n'
+ 'Please specify a valid build dir or change the working directory to it.')
+ else:
+ print('Introspection file {} does not exist.\n'
+ 'It is also possible that the build directory was generated with an old\n'
+ 'meson version. Please regenerate it in this case.'.format(get_info_file(infodir)))
+ return 1
+
+ vers_to_check = get_meson_introspection_required_version()
+ for i in vers_to_check:
+ if not mesonlib.version_compare(intro_vers, i):
+ print('Introspection version {} is not supported. '
+ 'The required version is: {}'
+ .format(intro_vers, ' and '.join(vers_to_check)))
+ return 1
+
+ # Extract introspection information from JSON
+ for i in intro_types.keys():
+ if not intro_types[i].func:
+ continue
+ if not options.all and not getattr(options, i, False):
+ continue
+ try:
+ results += [(i, load_info_file(infodir, i))]
+ except FileNotFoundError:
+ print('Introspection file {} does not exist.'.format(get_info_file(infodir, i)))
+ return 1
+
+ return print_results(options, results, indent)
+
+updated_introspection_files = [] # type: T.List[str]
+
+def write_intro_info(intro_info: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], info_dir: str) -> None:
+ global updated_introspection_files
+ for i in intro_info:
+ out_file = os.path.join(info_dir, 'intro-{}.json'.format(i[0]))
+ tmp_file = os.path.join(info_dir, 'tmp_dump.json')
+ with open(tmp_file, 'w', encoding='utf-8') as fp:
+ json.dump(i[1], fp)
+ fp.flush() # Not sure if this is needed
+ os.replace(tmp_file, out_file)
+ updated_introspection_files += [i[0]]
+
+def generate_introspection_file(builddata: build.Build, backend: backends.Backend) -> None:
+ coredata = builddata.environment.get_coredata()
+ intro_types = get_meson_introspection_types(coredata=coredata, builddata=builddata, backend=backend)
+ intro_info = [] # type: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]]
+
+ for key, val in intro_types.items():
+ if not val.func:
+ continue
+ intro_info += [(key, val.func())]
+
+ write_intro_info(intro_info, builddata.environment.info_dir)
+
+def update_build_options(coredata: cdata.CoreData, info_dir: str) -> None:
+ intro_info = [
+ ('buildoptions', list_buildoptions(coredata))
+ ]
+
+ write_intro_info(intro_info, info_dir)
+
+def split_version_string(version: str) -> T.Dict[str, T.Union[str, int]]:
+ vers_list = version.split('.')
+ return {
+ 'full': version,
+ 'major': int(vers_list[0] if len(vers_list) > 0 else 0),
+ 'minor': int(vers_list[1] if len(vers_list) > 1 else 0),
+ 'patch': int(vers_list[2] if len(vers_list) > 2 else 0)
+ }
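+
+# Quick illustration: split_version_string('0.57.1') returns
+# {'full': '0.57.1', 'major': 0, 'minor': 57, 'patch': 1}.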
+
+def write_meson_info_file(builddata: build.Build, errors: list, build_files_updated: bool = False) -> None:
+ global updated_introspection_files
+ info_dir = builddata.environment.info_dir
+ info_file = get_meson_info_file(info_dir)
+ intro_types = get_meson_introspection_types()
+ intro_info = {}
+
+ for i in intro_types.keys():
+ if not intro_types[i].func:
+ continue
+ intro_info[i] = {
+ 'file': f'intro-{i}.json',
+ 'updated': i in updated_introspection_files
+ }
+
+ info_data = {
+ 'meson_version': split_version_string(cdata.version),
+ 'directories': {
+ 'source': builddata.environment.get_source_dir(),
+ 'build': builddata.environment.get_build_dir(),
+ 'info': info_dir,
+ },
+ 'introspection': {
+ 'version': split_version_string(get_meson_introspection_version()),
+ 'information': intro_info,
+ },
+ 'build_files_updated': build_files_updated,
+ }
+
+ if errors:
+ info_data['error'] = True
+ info_data['error_list'] = [x if isinstance(x, str) else str(x) for x in errors]
+ else:
+ info_data['error'] = False
+
+    # Write the data to disk
+ tmp_file = os.path.join(info_dir, 'tmp_dump.json')
+ with open(tmp_file, 'w', encoding='utf-8') as fp:
+ json.dump(info_data, fp)
+ fp.flush()
+ os.replace(tmp_file, info_file)
diff --git a/meson/mesonbuild/mlog.py b/meson/mesonbuild/mlog.py
new file mode 100644
index 000000000..06d8a0b49
--- /dev/null
+++ b/meson/mesonbuild/mlog.py
@@ -0,0 +1,395 @@
+# Copyright 2013-2014 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import io
+import sys
+import time
+import platform
+import typing as T
+from contextlib import contextmanager
+from pathlib import Path
+
+if T.TYPE_CHECKING:
+ from ._typing import StringProtocol, SizedStringProtocol
+
+"""This is (mostly) a standalone module used to write logging
+information about Meson runs. Some output goes to screen,
+some to logging dir and some goes to both."""
+
+def _windows_ansi() -> bool:
+ # windll only exists on windows, so mypy will get mad
+ from ctypes import windll, byref # type: ignore
+ from ctypes.wintypes import DWORD
+
+ kernel = windll.kernel32
+ stdout = kernel.GetStdHandle(-11)
+ mode = DWORD()
+ if not kernel.GetConsoleMode(stdout, byref(mode)):
+ return False
+ # ENABLE_VIRTUAL_TERMINAL_PROCESSING == 0x4
+    # If the call to enable VT processing fails (returns 0), we fall back to
+    # the original behaviour.
+ return bool(kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON'))
+
+def colorize_console() -> bool:
+    _colorize_console = getattr(sys.stdout, 'colorize_console', None) # type: T.Optional[bool]
+ if _colorize_console is not None:
+ return _colorize_console
+
+ try:
+ if platform.system().lower() == 'windows':
+ _colorize_console = os.isatty(sys.stdout.fileno()) and _windows_ansi()
+ else:
+ _colorize_console = os.isatty(sys.stdout.fileno()) and os.environ.get('TERM', 'dumb') != 'dumb'
+ except Exception:
+ _colorize_console = False
+
+ sys.stdout.colorize_console = _colorize_console # type: ignore[attr-defined]
+ return _colorize_console
+
+def setup_console() -> None:
+ # on Windows, a subprocess might call SetConsoleMode() on the console
+ # connected to stdout and turn off ANSI escape processing. Call this after
+ # running a subprocess to ensure we turn it on again.
+ if platform.system().lower() == 'windows':
+ try:
+ delattr(sys.stdout, 'colorize_console')
+ except AttributeError:
+ pass
+
+log_dir = None # type: T.Optional[str]
+log_file = None # type: T.Optional[T.TextIO]
+log_fname = 'meson-log.txt' # type: str
+log_depth = [] # type: T.List[str]
+log_timestamp_start = None # type: T.Optional[float]
+log_fatal_warnings = False # type: bool
+log_disable_stdout = False # type: bool
+log_errors_only = False # type: bool
+_in_ci = 'CI' in os.environ # type: bool
+_logged_once = set() # type: T.Set[T.Tuple[str, ...]]
+log_warnings_counter = 0 # type: int
+
+def disable() -> None:
+ global log_disable_stdout
+ log_disable_stdout = True
+
+def enable() -> None:
+ global log_disable_stdout
+ log_disable_stdout = False
+
+def set_quiet() -> None:
+ global log_errors_only
+ log_errors_only = True
+
+def set_verbose() -> None:
+ global log_errors_only
+ log_errors_only = False
+
+def initialize(logdir: str, fatal_warnings: bool = False) -> None:
+ global log_dir, log_file, log_fatal_warnings
+ log_dir = logdir
+ log_file = open(os.path.join(logdir, log_fname), 'w', encoding='utf-8')
+ log_fatal_warnings = fatal_warnings
+
+def set_timestamp_start(start: float) -> None:
+ global log_timestamp_start
+ log_timestamp_start = start
+
+def shutdown() -> T.Optional[str]:
+ global log_file
+ if log_file is not None:
+ path = log_file.name
+ exception_around_goer = log_file
+ log_file = None
+ exception_around_goer.close()
+ return path
+ return None
+
+class AnsiDecorator:
+ plain_code = "\033[0m"
+
+ def __init__(self, text: str, code: str, quoted: bool = False):
+ self.text = text
+ self.code = code
+ self.quoted = quoted
+
+ def get_text(self, with_codes: bool) -> str:
+ text = self.text
+ if with_codes and self.code:
+ text = self.code + self.text + AnsiDecorator.plain_code
+ if self.quoted:
+ text = f'"{text}"'
+ return text
+
+ def __len__(self) -> int:
+ return len(self.text)
+
+ def __str__(self) -> str:
+ return self.get_text(colorize_console())
+
+TV_Loggable = T.Union[str, AnsiDecorator, 'StringProtocol']
+TV_LoggableList = T.List[TV_Loggable]
+
+class AnsiText:
+ def __init__(self, *args: 'SizedStringProtocol'):
+ self.args = args
+
+ def __len__(self) -> int:
+ return sum(len(x) for x in self.args)
+
+ def __str__(self) -> str:
+ return ''.join(str(x) for x in self.args)
+
+
+def bold(text: str, quoted: bool = False) -> AnsiDecorator:
+ return AnsiDecorator(text, "\033[1m", quoted=quoted)
+
+def plain(text: str) -> AnsiDecorator:
+ return AnsiDecorator(text, "")
+
+def red(text: str) -> AnsiDecorator:
+ return AnsiDecorator(text, "\033[1;31m")
+
+def green(text: str) -> AnsiDecorator:
+ return AnsiDecorator(text, "\033[1;32m")
+
+def yellow(text: str) -> AnsiDecorator:
+ return AnsiDecorator(text, "\033[1;33m")
+
+def blue(text: str) -> AnsiDecorator:
+ return AnsiDecorator(text, "\033[1;34m")
+
+def cyan(text: str) -> AnsiDecorator:
+ return AnsiDecorator(text, "\033[1;36m")
+
+def normal_red(text: str) -> AnsiDecorator:
+ return AnsiDecorator(text, "\033[31m")
+
+def normal_green(text: str) -> AnsiDecorator:
+ return AnsiDecorator(text, "\033[32m")
+
+def normal_yellow(text: str) -> AnsiDecorator:
+ return AnsiDecorator(text, "\033[33m")
+
+def normal_blue(text: str) -> AnsiDecorator:
+ return AnsiDecorator(text, "\033[34m")
+
+def normal_cyan(text: str) -> AnsiDecorator:
+ return AnsiDecorator(text, "\033[36m")
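+
+# Illustration: str(red('ERROR:')) evaluates to '\033[1;31mERROR:\033[0m' when
+# colorize_console() is true, and to plain 'ERROR:' otherwise.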
+
+# This really should be AnsiDecorator or anything that implements
+# __str__(), but that requires protocols from typing_extensions
+def process_markup(args: T.Sequence[TV_Loggable], keep: bool) -> T.List[str]:
+ arr = [] # type: T.List[str]
+ if log_timestamp_start is not None:
+ arr = ['[{:.3f}]'.format(time.monotonic() - log_timestamp_start)]
+ for arg in args:
+ if arg is None:
+ continue
+ if isinstance(arg, str):
+ arr.append(arg)
+ elif isinstance(arg, AnsiDecorator):
+ arr.append(arg.get_text(keep))
+ else:
+ arr.append(str(arg))
+ return arr
+
+def force_print(*args: str, nested: str, **kwargs: T.Any) -> None:
+ if log_disable_stdout:
+ return
+ iostr = io.StringIO()
+ kwargs['file'] = iostr
+ print(*args, **kwargs)
+
+ raw = iostr.getvalue()
+ if log_depth:
+ prepend = log_depth[-1] + '| ' if nested else ''
+ lines = []
+ for l in raw.split('\n'):
+ l = l.strip()
+ lines.append(prepend + l if l else '')
+ raw = '\n'.join(lines)
+
+ # _Something_ is going to get printed.
+ try:
+ print(raw, end='')
+ except UnicodeEncodeError:
+ cleaned = raw.encode('ascii', 'replace').decode('ascii')
+ print(cleaned, end='')
+
+# We really want a heterogeneous dict for this, but that's in typing_extensions
+def debug(*args: TV_Loggable, **kwargs: T.Any) -> None:
+ arr = process_markup(args, False)
+ if log_file is not None:
+ print(*arr, file=log_file, **kwargs)
+ log_file.flush()
+
+def _debug_log_cmd(cmd: str, args: T.List[str]) -> None:
+ if not _in_ci:
+ return
+ args = [f'"{x}"' for x in args] # Quote all args, just in case
+ debug('!meson_ci!/{} {}'.format(cmd, ' '.join(args)))
+
+def cmd_ci_include(file: str) -> None:
+ _debug_log_cmd('ci_include', [file])
+
+
+def log(*args: TV_Loggable, is_error: bool = False,
+ once: bool = False, **kwargs: T.Any) -> None:
+ if once:
+ return log_once(*args, is_error=is_error, **kwargs)
+ return _log(*args, is_error=is_error, **kwargs)
+
+
+def _log(*args: TV_Loggable, is_error: bool = False,
+ **kwargs: T.Any) -> None:
+ nested = kwargs.pop('nested', True)
+ arr = process_markup(args, False)
+ if log_file is not None:
+ print(*arr, file=log_file, **kwargs)
+ log_file.flush()
+ if colorize_console():
+ arr = process_markup(args, True)
+ if not log_errors_only or is_error:
+ force_print(*arr, nested=nested, **kwargs)
+
+def log_once(*args: TV_Loggable, is_error: bool = False,
+ **kwargs: T.Any) -> None:
+ """Log variant that only prints a given message one time per meson invocation.
+
+ This considers ansi decorated values by the values they wrap without
+ regard for the AnsiDecorator itself.
+ """
+ def to_str(x: TV_Loggable) -> str:
+ if isinstance(x, str):
+ return x
+ if isinstance(x, AnsiDecorator):
+ return x.text
+ return str(x)
+ t = tuple(to_str(a) for a in args)
+ if t in _logged_once:
+ return
+ _logged_once.add(t)
+ _log(*args, is_error=is_error, **kwargs)
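+
+# Usage sketch (the message text is just an example): log_once('Using fallback')
+# prints on the first call; later calls whose arguments render to the same
+# strings are silently dropped.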
+
+# This isn't strictly correct. What we really want here is something like:
+# class StringProtocol(typing_extensions.Protocol):
+#
+# def __str__(self) -> str: ...
+#
+# This would more accurately embody what this function can handle, but we
+# don't have that yet, so instead we'll do some casting to work around it
+def get_error_location_string(fname: str, lineno: str) -> str:
+ return f'{fname}:{lineno}:'
+
+def _log_error(severity: str, *rargs: TV_Loggable,
+ once: bool = False, fatal: bool = True, **kwargs: T.Any) -> None:
+ from .mesonlib import MesonException, relpath
+
+ # The typing requirements here are non-obvious. Lists are invariant,
+ # therefore T.List[A] and T.List[T.Union[A, B]] are not able to be joined
+ if severity == 'notice':
+ label = [bold('NOTICE:')] # type: TV_LoggableList
+ elif severity == 'warning':
+ label = [yellow('WARNING:')]
+ elif severity == 'error':
+ label = [red('ERROR:')]
+ elif severity == 'deprecation':
+ label = [red('DEPRECATION:')]
+ else:
+ raise MesonException('Invalid severity ' + severity)
+ # rargs is a tuple, not a list
+ args = label + list(rargs)
+
+ location = kwargs.pop('location', None)
+ if location is not None:
+ location_file = relpath(location.filename, os.getcwd())
+ location_str = get_error_location_string(location_file, location.lineno)
+ # Unions are frankly awful, and we have to T.cast here to get mypy
+ # to understand that the list concatenation is safe
+ location_list = T.cast(TV_LoggableList, [location_str])
+ args = location_list + args
+
+ log(*args, once=once, **kwargs)
+
+ global log_warnings_counter
+ log_warnings_counter += 1
+
+ if log_fatal_warnings and fatal:
+ raise MesonException("Fatal warnings enabled, aborting")
+
+def error(*args: TV_Loggable, **kwargs: T.Any) -> None:
+ return _log_error('error', *args, **kwargs, is_error=True)
+
+def warning(*args: TV_Loggable, **kwargs: T.Any) -> None:
+ return _log_error('warning', *args, **kwargs, is_error=True)
+
+def deprecation(*args: TV_Loggable, **kwargs: T.Any) -> None:
+ return _log_error('deprecation', *args, **kwargs, is_error=True)
+
+def notice(*args: TV_Loggable, **kwargs: T.Any) -> None:
+ return _log_error('notice', *args, **kwargs, is_error=False)
+
+def get_relative_path(target: Path, current: Path) -> Path:
+ """Get the path to target from current"""
+ # Go up "current" until we find a common ancestor to target
+ acc = ['.']
+ for part in [current, *current.parents]:
+ try:
+ path = target.relative_to(part)
+ return Path(*acc, path)
+ except ValueError:
+ pass
+ acc += ['..']
+
+ # we failed, should not get here
+ return target
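+
+# Illustration with assumed paths:
+#   get_relative_path(Path('/a/b/c'), Path('/a/d')) returns Path('../b/c').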
+
+def exception(e: Exception, prefix: T.Optional[AnsiDecorator] = None) -> None:
+ if prefix is None:
+ prefix = red('ERROR:')
+ log()
+ args = [] # type: T.List[T.Union[AnsiDecorator, str]]
+ if all(getattr(e, a, None) is not None for a in ['file', 'lineno', 'colno']):
+ # Mypy doesn't follow hasattr, and it's pretty easy to visually inspect
+ # that this is correct, so we'll just ignore it.
+ path = get_relative_path(Path(e.file), Path(os.getcwd())) # type: ignore
+ args.append(f'{path}:{e.lineno}:{e.colno}:') # type: ignore
+ if prefix:
+ args.append(prefix)
+ args.append(str(e))
+ log(*args)
+
+# Format a list for logging purposes as a string. It separates
+# all but the last item with commas, and the last with 'and'.
+def format_list(input_list: T.List[str]) -> str:
+ l = len(input_list)
+ if l > 2:
+ return ' and '.join([', '.join(input_list[:-1]), input_list[-1]])
+ elif l == 2:
+ return ' and '.join(input_list)
+ elif l == 1:
+ return input_list[0]
+ else:
+ return ''
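+
+# For instance: format_list(['a', 'b', 'c']) == 'a, b and c' and
+# format_list(['a', 'b']) == 'a and b'.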
+
+@contextmanager
+def nested(name: str = '') -> T.Generator[None, None, None]:
+ global log_depth
+ log_depth.append(name)
+ try:
+ yield
+ finally:
+ log_depth.pop()
diff --git a/meson/mesonbuild/modules/__init__.py b/meson/mesonbuild/modules/__init__.py
new file mode 100644
index 000000000..737a01c2f
--- /dev/null
+++ b/meson/mesonbuild/modules/__init__.py
@@ -0,0 +1,212 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the base classes and helpers shared by all Meson modules.
+
+import os
+import typing as T
+
+from .. import build, mesonlib
+from ..mesonlib import relpath, HoldableObject
+from ..interpreterbase.decorators import noKwargs, noPosargs
+
+if T.TYPE_CHECKING:
+ from ..interpreter import Interpreter
+ from ..interpreterbase import TYPE_var, TYPE_kwargs
+ from ..programs import ExternalProgram
+
+class ModuleState:
+ """Object passed to all module methods.
+
+    This is a WIP API provided to modules; it should be extended until modules
+    do not need to touch any other part of Meson's internal APIs.
+ """
+
+ def __init__(self, interpreter: 'Interpreter') -> None:
+ # Keep it private, it should be accessed only through methods.
+ self._interpreter = interpreter
+
+ self.source_root = interpreter.environment.get_source_dir()
+ self.build_to_src = relpath(interpreter.environment.get_source_dir(),
+ interpreter.environment.get_build_dir())
+ self.subproject = interpreter.subproject
+ self.subdir = interpreter.subdir
+ self.current_lineno = interpreter.current_lineno
+ self.environment = interpreter.environment
+ self.project_name = interpreter.build.project_name
+ self.project_version = interpreter.build.dep_manifest[interpreter.active_projectname]
+ # The backend object is under-used right now, but we will need it:
+ # https://github.com/mesonbuild/meson/issues/1419
+ self.backend = interpreter.backend
+ self.targets = interpreter.build.targets
+ self.data = interpreter.build.data
+ self.headers = interpreter.build.get_headers()
+ self.man = interpreter.build.get_man()
+ self.global_args = interpreter.build.global_args.host
+ self.project_args = interpreter.build.projects_args.host.get(interpreter.subproject, {})
+ self.build_machine = interpreter.builtin['build_machine'].held_object
+ self.host_machine = interpreter.builtin['host_machine'].held_object
+ self.target_machine = interpreter.builtin['target_machine'].held_object
+ self.current_node = interpreter.current_node
+
+ def get_include_args(self, include_dirs: T.Iterable[T.Union[str, build.IncludeDirs]], prefix: str = '-I') -> T.List[str]:
+ if not include_dirs:
+ return []
+
+ srcdir = self.environment.get_source_dir()
+ builddir = self.environment.get_build_dir()
+
+ dirs_str: T.List[str] = []
+ for dirs in include_dirs:
+ if isinstance(dirs, str):
+ dirs_str += [f'{prefix}{dirs}']
+ continue
+
+ # Should be build.IncludeDirs object.
+ basedir = dirs.get_curdir()
+ for d in dirs.get_incdirs():
+ expdir = os.path.join(basedir, d)
+ srctreedir = os.path.join(srcdir, expdir)
+ buildtreedir = os.path.join(builddir, expdir)
+ dirs_str += [f'{prefix}{buildtreedir}',
+ f'{prefix}{srctreedir}']
+ for d in dirs.get_extra_build_dirs():
+ dirs_str += [f'{prefix}{d}']
+
+ return dirs_str
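+
+    # Illustrative call: get_include_args(['include']) returns ['-Iinclude'];
+    # an IncludeDirs object instead expands to both its build-tree and
+    # source-tree paths.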
+
+ def find_program(self, prog: T.Union[str, T.List[str]], required: bool = True,
+ version_func: T.Optional[T.Callable[['ExternalProgram'], str]] = None,
+ wanted: T.Optional[str] = None) -> 'ExternalProgram':
+ return self._interpreter.find_program_impl(prog, required=required, version_func=version_func, wanted=wanted)
+
+ def test(self, args: T.Tuple[str, T.Union[build.Executable, build.Jar, 'ExternalProgram', mesonlib.File]],
+ workdir: T.Optional[str] = None,
+             env: T.Optional[T.Union[T.List[str], T.Dict[str, str], str]] = None,
+             depends: T.Optional[T.List[T.Union[build.CustomTarget, build.BuildTarget]]] = None) -> None:
+ kwargs = {'workdir': workdir,
+ 'env': env,
+ 'depends': depends,
+ }
+ # TODO: Use interpreter internal API, but we need to go through @typed_kwargs
+ self._interpreter.func_test(self.current_node, args, kwargs)
+
+
+class ModuleObject(HoldableObject):
+ """Base class for all objects returned by modules
+ """
+ def __init__(self) -> None:
+ self.methods: T.Dict[
+ str,
+ T.Callable[[ModuleState, T.List['TYPE_var'], 'TYPE_kwargs'], T.Union[ModuleReturnValue, 'TYPE_var']]
+ ] = {}
+
+
+class MutableModuleObject(ModuleObject):
+ pass
+
+
+# FIXME: Port all modules to stop using self.interpreter and use API on
+# ModuleState instead. Modules should stop using this class and instead use
+# ModuleObject base class.
+class ExtensionModule(ModuleObject):
+ def __init__(self, interpreter: 'Interpreter') -> None:
+ super().__init__()
+ self.interpreter = interpreter
+ self.methods.update({
+ 'found': self.found_method,
+ })
+
+ @noPosargs
+ @noKwargs
+ def found_method(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+ return self.found()
+
+ @staticmethod
+ def found() -> bool:
+ return True
+
+
+class NewExtensionModule(ModuleObject):
+
+ """Class for modern modules
+
+ provides the found method.
+ """
+
+ def __init__(self) -> None:
+ super().__init__()
+ self.methods.update({
+ 'found': self.found_method,
+ })
+
+ @noPosargs
+ @noKwargs
+ def found_method(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+ return self.found()
+
+ @staticmethod
+ def found() -> bool:
+ return True
+
+
+class NotFoundExtensionModule(NewExtensionModule):
+
+ """Class for modern modules
+
+ provides the found method.
+ """
+
+ @staticmethod
+ def found() -> bool:
+ return False
+
+
+def is_module_library(fname):
+ '''
+ Check if the file is a library-like file generated by a module-specific
+ target, such as GirTarget or TypelibTarget
+ '''
+ if hasattr(fname, 'fname'):
+ fname = fname.fname
+ suffix = fname.split('.')[-1]
+ return suffix in ('gir', 'typelib')
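+
+# e.g. is_module_library('Foo-1.0.gir') is True, while
+# is_module_library('libfoo.so') is False.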
+
+
+class ModuleReturnValue:
+ def __init__(self, return_value: T.Optional['TYPE_var'], new_objects: T.List['TYPE_var']) -> None:
+ self.return_value = return_value
+        assert isinstance(new_objects, list)
+ self.new_objects = new_objects
+
+class GResourceTarget(build.CustomTarget):
+ def __init__(self, name, subdir, subproject, kwargs):
+ super().__init__(name, subdir, subproject, kwargs)
+
+class GResourceHeaderTarget(build.CustomTarget):
+ def __init__(self, name, subdir, subproject, kwargs):
+ super().__init__(name, subdir, subproject, kwargs)
+
+class GirTarget(build.CustomTarget):
+ def __init__(self, name, subdir, subproject, kwargs):
+ super().__init__(name, subdir, subproject, kwargs)
+
+class TypelibTarget(build.CustomTarget):
+ def __init__(self, name, subdir, subproject, kwargs):
+ super().__init__(name, subdir, subproject, kwargs)
+
+class VapiTarget(build.CustomTarget):
+ def __init__(self, name, subdir, subproject, kwargs):
+ super().__init__(name, subdir, subproject, kwargs)
diff --git a/meson/mesonbuild/modules/cmake.py b/meson/mesonbuild/modules/cmake.py
new file mode 100644
index 000000000..cc259dcdc
--- /dev/null
+++ b/meson/mesonbuild/modules/cmake.py
@@ -0,0 +1,406 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import re
+import os, os.path, pathlib
+import shutil
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue, ModuleObject
+
+from .. import build, mesonlib, mlog, dependencies
+from ..cmake import SingleTargetOptions, TargetOptions, cmake_defines_to_args
+from ..interpreter import ConfigurationDataObject, SubprojectHolder
+from ..interpreterbase import (
+ FeatureNew,
+ FeatureNewKwargs,
+ FeatureDeprecatedKwargs,
+
+ stringArgs,
+ permittedKwargs,
+ noPosargs,
+ noKwargs,
+
+ InvalidArguments,
+ InterpreterException,
+)
+from ..programs import ExternalProgram
+
+
+COMPATIBILITIES = ['AnyNewerVersion', 'SameMajorVersion', 'SameMinorVersion', 'ExactVersion']
+
+# Taken from https://github.com/Kitware/CMake/blob/master/Modules/CMakePackageConfigHelpers.cmake
+PACKAGE_INIT_BASE = '''
+####### Expanded from \\@PACKAGE_INIT\\@ by configure_package_config_file() #######
+####### Any changes to this file will be overwritten by the next CMake run ####
+####### The input file was @inputFileName@ ########
+
+get_filename_component(PACKAGE_PREFIX_DIR "${CMAKE_CURRENT_LIST_DIR}/@PACKAGE_RELATIVE_PATH@" ABSOLUTE)
+'''
+PACKAGE_INIT_EXT = '''
+# Use original install prefix when loaded through a "/usr move"
+# cross-prefix symbolic link such as /lib -> /usr/lib.
+get_filename_component(_realCurr "${CMAKE_CURRENT_LIST_DIR}" REALPATH)
+get_filename_component(_realOrig "@absInstallDir@" REALPATH)
+if(_realCurr STREQUAL _realOrig)
+ set(PACKAGE_PREFIX_DIR "@installPrefix@")
+endif()
+unset(_realOrig)
+unset(_realCurr)
+'''
+PACKAGE_INIT_SET_AND_CHECK = '''
+macro(set_and_check _var _file)
+ set(${_var} "${_file}")
+ if(NOT EXISTS "${_file}")
+ message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !")
+ endif()
+endmacro()
+
+####################################################################################
+'''
+
+class CMakeSubproject(ModuleObject):
+ def __init__(self, subp, pv):
+        assert isinstance(subp, SubprojectHolder)
+        assert hasattr(subp, 'cm_interpreter')
+ super().__init__()
+ self.subp = subp
+ self.methods.update({'get_variable': self.get_variable,
+ 'dependency': self.dependency,
+ 'include_directories': self.include_directories,
+ 'target': self.target,
+ 'target_type': self.target_type,
+ 'target_list': self.target_list,
+ 'found': self.found_method,
+ })
+
+ def _args_to_info(self, args):
+ if len(args) != 1:
+ raise InterpreterException('Exactly one argument is required.')
+
+ tgt = args[0]
+ res = self.subp.cm_interpreter.target_info(tgt)
+ if res is None:
+ raise InterpreterException(f'The CMake target {tgt} does not exist\n' +
+ ' Use the following command in your meson.build to list all available targets:\n\n' +
+                                       '  message(\'CMake targets:\\n - \' + \'\\n - \'.join(<cmake_subproject>.target_list()))')
+
+ # Make sure that all keys are present (if not this is a bug)
+ assert(all([x in res for x in ['inc', 'src', 'dep', 'tgt', 'func']]))
+ return res
+
+ @noKwargs
+ @stringArgs
+ def get_variable(self, state, args, kwargs):
+ return self.subp.get_variable_method(args, kwargs)
+
+ @FeatureNewKwargs('dependency', '0.56.0', ['include_type'])
+ @permittedKwargs({'include_type'})
+ @stringArgs
+ def dependency(self, state, args, kwargs):
+ info = self._args_to_info(args)
+ if info['func'] == 'executable':
+ raise InvalidArguments(f'{args[0]} is an executable and does not support the dependency() method. Use target() instead.')
+ orig = self.get_variable(state, [info['dep']], {})
+ assert isinstance(orig, dependencies.Dependency)
+ actual = orig.include_type
+ if 'include_type' in kwargs and kwargs['include_type'] != actual:
+ mlog.debug('Current include type is {}. Converting to requested {}'.format(actual, kwargs['include_type']))
+ return orig.generate_system_dependency(kwargs['include_type'])
+ return orig
+
+ @noKwargs
+ @stringArgs
+ def include_directories(self, state, args, kwargs):
+ info = self._args_to_info(args)
+ return self.get_variable(state, [info['inc']], kwargs)
+
+ @noKwargs
+ @stringArgs
+ def target(self, state, args, kwargs):
+ info = self._args_to_info(args)
+ return self.get_variable(state, [info['tgt']], kwargs)
+
+ @noKwargs
+ @stringArgs
+ def target_type(self, state, args, kwargs):
+ info = self._args_to_info(args)
+ return info['func']
+
+ @noPosargs
+ @noKwargs
+ def target_list(self, state, args, kwargs):
+ return self.subp.cm_interpreter.target_list()
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('CMakeSubproject.found()', '0.53.2')
+ def found_method(self, state, args, kwargs):
+ return self.subp is not None
+
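+# CMakeSubproject above is returned by cmake.subproject() and can be used from
+# meson.build roughly as follows (subproject 'foo' and target 'foo_lib' are
+# illustrative placeholders, not part of this module):
+#
+# cmake = import('cmake')
+# sub = cmake.subproject('foo')
+# foo_dep = sub.dependency('foo_lib')
+# message('CMake targets:\n - ' + '\n - '.join(sub.target_list()))
+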
+
+class CMakeSubprojectOptions(ModuleObject):
+ def __init__(self) -> None:
+ super().__init__()
+ self.cmake_options = [] # type: T.List[str]
+ self.target_options = TargetOptions()
+
+ self.methods.update(
+ {
+ 'add_cmake_defines': self.add_cmake_defines,
+ 'set_override_option': self.set_override_option,
+ 'set_install': self.set_install,
+ 'append_compile_args': self.append_compile_args,
+ 'append_link_args': self.append_link_args,
+ 'clear': self.clear,
+ }
+ )
+
+ def _get_opts(self, kwargs: dict) -> SingleTargetOptions:
+ if 'target' in kwargs:
+ return self.target_options[kwargs['target']]
+ return self.target_options.global_options
+
+ @noKwargs
+ def add_cmake_defines(self, state, args, kwargs) -> None:
+ self.cmake_options += cmake_defines_to_args(args)
+
+ @stringArgs
+ @permittedKwargs({'target'})
+ def set_override_option(self, state, args, kwargs) -> None:
+ if len(args) != 2:
+ raise InvalidArguments('set_override_option takes exactly 2 positional arguments')
+ self._get_opts(kwargs).set_opt(args[0], args[1])
+
+ @permittedKwargs({'target'})
+ def set_install(self, state, args, kwargs) -> None:
+ if len(args) != 1 or not isinstance(args[0], bool):
+ raise InvalidArguments('set_install takes exactly 1 boolean argument')
+ self._get_opts(kwargs).set_install(args[0])
+
+ @stringArgs
+ @permittedKwargs({'target'})
+ def append_compile_args(self, state, args, kwargs) -> None:
+ if len(args) < 2:
+ raise InvalidArguments('append_compile_args takes at least 2 positional arguments')
+ self._get_opts(kwargs).append_args(args[0], args[1:])
+
+ @stringArgs
+ @permittedKwargs({'target'})
+ def append_link_args(self, state, args, kwargs) -> None:
+ if not args:
+ raise InvalidArguments('append_link_args takes at least 1 positional argument')
+ self._get_opts(kwargs).append_link_args(args)
+
+ @noPosargs
+ @noKwargs
+ def clear(self, state, args, kwargs) -> None:
+ self.cmake_options.clear()
+ self.target_options = TargetOptions()
+
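+# CMakeSubprojectOptions above is created via cmake.subproject_options() and
+# passed to cmake.subproject(); a minimal sketch (all names are placeholders):
+#
+# opts = cmake.subproject_options()
+# opts.add_cmake_defines({'SOME_DEFINE': 'value'})
+# opts.set_override_option('cpp_std', 'c++17', target: 'some_target')
+# sub = cmake.subproject('foo', options: opts)
+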
+
+class CmakeModule(ExtensionModule):
+ cmake_detected = False
+ cmake_root = None
+
+ def __init__(self, interpreter):
+ super().__init__(interpreter)
+ self.methods.update({
+ 'write_basic_package_version_file': self.write_basic_package_version_file,
+ 'configure_package_config_file': self.configure_package_config_file,
+ 'subproject': self.subproject,
+ 'subproject_options': self.subproject_options,
+ })
+
+ def detect_voidp_size(self, env):
+ compilers = env.coredata.compilers.host
+ compiler = compilers.get('c', None)
+ if not compiler:
+ compiler = compilers.get('cpp', None)
+
+ if not compiler:
+ raise mesonlib.MesonException('Requires a C or C++ compiler to compute sizeof(void *).')
+
+ return compiler.sizeof('void *', '', env)
+
+ def detect_cmake(self):
+ if self.cmake_detected:
+ return True
+
+ cmakebin = ExternalProgram('cmake', silent=False)
+ p, stdout, stderr = mesonlib.Popen_safe(cmakebin.get_command() + ['--system-information', '-G', 'Ninja'])[0:3]
+ if p.returncode != 0:
+ mlog.log(f'error retrieving cmake information: returnCode={p.returncode} stdout={stdout} stderr={stderr}')
+ return False
+
+ match = re.search('\nCMAKE_ROOT \\"([^"]+)"\n', stdout.strip())
+ if not match:
+ mlog.log('unable to determine cmake root')
+ return False
+
+ cmakePath = pathlib.PurePath(match.group(1))
+ self.cmake_root = os.path.join(*cmakePath.parts)
+ self.cmake_detected = True
+ return True
+
+ @permittedKwargs({'version', 'name', 'compatibility', 'install_dir'})
+ def write_basic_package_version_file(self, state, _args, kwargs):
+ version = kwargs.get('version', None)
+ if not isinstance(version, str):
+ raise mesonlib.MesonException('"version" must be specified as a string.')
+
+ name = kwargs.get('name', None)
+ if not isinstance(name, str):
+ raise mesonlib.MesonException('"name" must be specified as a string.')
+
+ compatibility = kwargs.get('compatibility', 'AnyNewerVersion')
+ if not isinstance(compatibility, str):
+ raise mesonlib.MesonException('"compatibility" must be a string.')
+ if compatibility not in COMPATIBILITIES:
+ raise mesonlib.MesonException('"compatibility" must be one of: ' + ', '.join(COMPATIBILITIES) + '.')
+
+ if not self.detect_cmake():
+ raise mesonlib.MesonException('Unable to find cmake')
+
+ pkgroot = kwargs.get('install_dir', None)
+ if pkgroot is None:
+ pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'cmake', name)
+ if not isinstance(pkgroot, str):
+ raise mesonlib.MesonException('"install_dir" must be a string.')
+
+ template_file = os.path.join(self.cmake_root, 'Modules', f'BasicConfigVersion-{compatibility}.cmake.in')
+ if not os.path.exists(template_file):
+ raise mesonlib.MesonException(f'your cmake installation doesn\'t support the {compatibility} compatibility')
+
+ version_file = os.path.join(state.environment.scratch_dir, f'{name}ConfigVersion.cmake')
+
+ conf = {
+ 'CVF_VERSION': (version, ''),
+ 'CMAKE_SIZEOF_VOID_P': (str(self.detect_voidp_size(state.environment)), '')
+ }
+ mesonlib.do_conf_file(template_file, version_file, conf, 'meson')
+
+ res = build.Data([mesonlib.File(True, state.environment.get_scratch_dir(), version_file)], pkgroot, None, state.subproject)
+ return ModuleReturnValue(res, [res])
+
+ def create_package_file(self, infile, outfile, PACKAGE_RELATIVE_PATH, extra, confdata):
+ package_init = PACKAGE_INIT_BASE.replace('@PACKAGE_RELATIVE_PATH@', PACKAGE_RELATIVE_PATH)
+ package_init = package_init.replace('@inputFileName@', infile)
+ package_init += extra
+ package_init += PACKAGE_INIT_SET_AND_CHECK
+
+ try:
+ with open(infile, encoding='utf-8') as fin:
+ data = fin.readlines()
+ except Exception as e:
+ raise mesonlib.MesonException('Could not read input file {}: {}'.format(infile, str(e)))
+
+ result = []
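+ # The pattern matches @VARNAME@ placeholders; the first two alternatives
+ # match escaped forms (backslash runs before '@' and a literal '\@') so
+ # that do_replacement() treats them as escapes rather than substitutions.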
+ regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
+ for line in data:
+ line = line.replace('@PACKAGE_INIT@', package_init)
+ line, _missing = mesonlib.do_replacement(regex, line, 'meson', confdata)
+
+ result.append(line)
+
+ outfile_tmp = outfile + "~"
+ with open(outfile_tmp, "w", encoding='utf-8') as fout:
+ fout.writelines(result)
+
+ shutil.copymode(infile, outfile_tmp)
+ mesonlib.replace_if_different(outfile, outfile_tmp)
+
+ @permittedKwargs({'input', 'name', 'install_dir', 'configuration'})
+ def configure_package_config_file(self, state, args, kwargs):
+ if args:
+ raise mesonlib.MesonException('configure_package_config_file takes only keyword arguments.')
+
+ if 'input' not in kwargs:
+ raise mesonlib.MesonException('configure_package_config_file requires "input" keyword.')
+ inputfile = kwargs['input']
+ if isinstance(inputfile, list):
+ if len(inputfile) != 1:
+ m = "Keyword argument 'input' requires exactly one file"
+ raise mesonlib.MesonException(m)
+ inputfile = inputfile[0]
+ if not isinstance(inputfile, (str, mesonlib.File)):
+ raise mesonlib.MesonException("input must be a string or a file")
+ if isinstance(inputfile, str):
+ inputfile = mesonlib.File.from_source_file(state.environment.source_dir, state.subdir, inputfile)
+
+ ifile_abs = inputfile.absolute_path(state.environment.source_dir, state.environment.build_dir)
+
+ if 'name' not in kwargs:
+ raise mesonlib.MesonException('"name" not specified.')
+ name = kwargs['name']
+
+ (ofile_path, ofile_fname) = os.path.split(os.path.join(state.subdir, f'{name}Config.cmake'))
+ ofile_abs = os.path.join(state.environment.build_dir, ofile_path, ofile_fname)
+
+ install_dir = kwargs.get('install_dir', os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'cmake', name))
+ if not isinstance(install_dir, str):
+ raise mesonlib.MesonException('"install_dir" must be a string.')
+
+ if 'configuration' not in kwargs:
+ raise mesonlib.MesonException('"configuration" not specified.')
+ conf = kwargs['configuration']
+ if not isinstance(conf, ConfigurationDataObject):
+ raise mesonlib.MesonException('Argument "configuration" is not of type configuration_data')
+
+ prefix = state.environment.coredata.get_option(mesonlib.OptionKey('prefix'))
+ abs_install_dir = install_dir
+ if not os.path.isabs(abs_install_dir):
+ abs_install_dir = os.path.join(prefix, install_dir)
+
+ PACKAGE_RELATIVE_PATH = os.path.relpath(prefix, abs_install_dir)
+ extra = ''
+ if re.match('^(/usr)?/lib(64)?/.+', abs_install_dir):
+ extra = PACKAGE_INIT_EXT.replace('@absInstallDir@', abs_install_dir)
+ extra = extra.replace('@installPrefix@', prefix)
+
+ self.create_package_file(ifile_abs, ofile_abs, PACKAGE_RELATIVE_PATH, extra, conf.conf_data)
+ conf.mark_used()
+
+ conffile = os.path.normpath(inputfile.relative_name())
+ if conffile not in self.interpreter.build_def_files:
+ self.interpreter.build_def_files.append(conffile)
+
+ res = build.Data([mesonlib.File(True, ofile_path, ofile_fname)], install_dir, None, state.subproject)
+ self.interpreter.build.data.append(res)
+
+ return res
+
+ @FeatureNew('subproject', '0.51.0')
+ @FeatureNewKwargs('subproject', '0.55.0', ['options'])
+ @FeatureDeprecatedKwargs('subproject', '0.55.0', ['cmake_options'])
+ @permittedKwargs({'cmake_options', 'required', 'options'})
+ @stringArgs
+ def subproject(self, state, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('Subproject takes exactly one argument')
+ if 'cmake_options' in kwargs and 'options' in kwargs:
+ raise InterpreterException('"options" cannot be used together with "cmake_options"')
+ dirname = args[0]
+ subp = self.interpreter.do_subproject(dirname, 'cmake', kwargs)
+ if not subp.found():
+ return subp
+ return CMakeSubproject(subp, dirname)
+
+ @FeatureNew('subproject_options', '0.55.0')
+ @noKwargs
+ @noPosargs
+ def subproject_options(self, state, args, kwargs) -> CMakeSubprojectOptions:
+ return CMakeSubprojectOptions()
+
+def initialize(*args, **kwargs):
+ return CmakeModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/dlang.py b/meson/mesonbuild/modules/dlang.py
new file mode 100644
index 000000000..60d28854e
--- /dev/null
+++ b/meson/mesonbuild/modules/dlang.py
@@ -0,0 +1,135 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module provides DUB integration for D projects, most notably
+# generation of dub.json package description files.
+
+import json
+import os
+
+from . import ExtensionModule
+from .. import dependencies
+from .. import mlog
+from ..mesonlib import Popen_safe, MesonException
+from ..programs import ExternalProgram
+
+class DlangModule(ExtensionModule):
+ class_dubbin = None
+ init_dub = False
+
+ def __init__(self, interpreter):
+ super().__init__(interpreter)
+ self.methods.update({
+ 'generate_dub_file': self.generate_dub_file,
+ })
+
+ def _init_dub(self):
+ if DlangModule.class_dubbin is None:
+ self.dubbin = dependencies.DubDependency.class_dubbin
+ DlangModule.class_dubbin = self.dubbin
+ else:
+ self.dubbin = DlangModule.class_dubbin
+
+ if DlangModule.class_dubbin is None:
+ self.dubbin = self.check_dub()
+ DlangModule.class_dubbin = self.dubbin
+ else:
+ self.dubbin = DlangModule.class_dubbin
+
+ if not self.dubbin:
+ raise MesonException('DUB not found.')
+
+ def generate_dub_file(self, state, args, kwargs):
+ if not DlangModule.init_dub:
+ self._init_dub()
+
+ if len(args) < 2:
+ raise MesonException('generate_dub_file requires at least two arguments: the package name and the directory for dub.json.')
+
+ config = {
+ 'name': args[0]
+ }
+
+ config_path = os.path.join(args[1], 'dub.json')
+ if os.path.exists(config_path):
+ with open(config_path, encoding='utf-8') as ofile:
+ try:
+ config = json.load(ofile)
+ except ValueError:
+ mlog.warning('Failed to load the data in dub.json')
+
+ warn_publishing = ['description', 'license']
+ for arg in warn_publishing:
+ if arg not in kwargs and \
+ arg not in config:
+ mlog.warning('Without', mlog.bold(arg), 'the DUB package can\'t be published')
+
+ for key, value in kwargs.items():
+ if key == 'dependencies':
+ config[key] = {}
+ if isinstance(value, list):
+ for dep in value:
+ if isinstance(dep, dependencies.Dependency):
+ name = dep.get_name()
+ ret, res = self._call_dubbin(['describe', name])
+ if ret == 0:
+ version = dep.get_version()
+ if version is None:
+ config[key][name] = ''
+ else:
+ config[key][name] = version
+ elif isinstance(value, dependencies.Dependency):
+ name = value.get_name()
+ ret, res = self._call_dubbin(['describe', name])
+ if ret == 0:
+ version = value.get_version()
+ if version is None:
+ config[key][name] = ''
+ else:
+ config[key][name] = version
+ else:
+ config[key] = value
+
+ with open(config_path, 'w', encoding='utf-8') as ofile:
+ ofile.write(json.dumps(config, indent=4, ensure_ascii=False))
+
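+ # Illustrative meson.build usage of generate_dub_file() above (the package
+ # name, directory and metadata are placeholders):
+ #
+ # dlang = import('dlang')
+ # dlang.generate_dub_file(meson.project_name().to_lower(), meson.build_root(),
+ # description: 'A demo package', license: 'MIT')
+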
+ def _call_dubbin(self, args, env=None):
+ p, out = Popen_safe(self.dubbin.get_command() + args, env=env)[0:2]
+ return p.returncode, out.strip()
+
+ def check_dub(self):
+ dubbin = ExternalProgram('dub', silent=True)
+ if dubbin.found():
+ try:
+ p, out = Popen_safe(dubbin.get_command() + ['--version'])[0:2]
+ if p.returncode != 0:
+ mlog.warning('Found dub {!r} but couldn\'t run it'
+ ''.format(' '.join(dubbin.get_command())))
+ # Set to False instead of None to signify that we've already
+ # searched for it and not found it
+ dubbin = False
+ except (FileNotFoundError, PermissionError):
+ dubbin = False
+ else:
+ dubbin = False
+ if dubbin:
+ mlog.log('Found DUB:', mlog.bold(dubbin.get_path()),
+ '(%s)' % out.strip())
+ else:
+ mlog.log('Found DUB:', mlog.red('NO'))
+ return dubbin
+
+def initialize(*args, **kwargs):
+ return DlangModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/fs.py b/meson/mesonbuild/modules/fs.py
new file mode 100644
index 000000000..ab3aae2b1
--- /dev/null
+++ b/meson/mesonbuild/modules/fs.py
@@ -0,0 +1,258 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing as T
+import hashlib
+import os
+from pathlib import Path, PurePath, PureWindowsPath
+
+from .. import mlog
+from . import ExtensionModule
+from ..mesonlib import (
+ File,
+ FileOrString,
+ MesonException,
+ path_is_in_root,
+)
+from ..interpreterbase import FeatureNew, KwargInfo, typed_kwargs, typed_pos_args, noKwargs
+
+if T.TYPE_CHECKING:
+ from . import ModuleState
+ from ..interpreter import Interpreter
+
+ from typing_extensions import TypedDict
+
+ class ReadKwArgs(TypedDict):
+ """Keyword Arguments for fs.read."""
+
+ encoding: str
+
+
+class FSModule(ExtensionModule):
+
+ def __init__(self, interpreter: 'Interpreter') -> None:
+ super().__init__(interpreter)
+ self.methods.update({
+ 'expanduser': self.expanduser,
+ 'is_absolute': self.is_absolute,
+ 'as_posix': self.as_posix,
+ 'exists': self.exists,
+ 'is_symlink': self.is_symlink,
+ 'is_file': self.is_file,
+ 'is_dir': self.is_dir,
+ 'hash': self.hash,
+ 'size': self.size,
+ 'is_samepath': self.is_samepath,
+ 'replace_suffix': self.replace_suffix,
+ 'parent': self.parent,
+ 'name': self.name,
+ 'stem': self.stem,
+ 'read': self.read,
+ })
+
+ def _absolute_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path:
+ """
+ make an absolute path from a relative path, WITHOUT resolving symlinks
+ """
+ if isinstance(arg, File):
+ return Path(arg.absolute_path(state.source_root, self.interpreter.environment.get_build_dir()))
+ return Path(state.source_root) / Path(state.subdir) / Path(arg).expanduser()
+
+ def _resolve_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path:
+ """
+ resolves symlinks and makes absolute a directory relative to calling meson.build,
+ if not already absolute
+ """
+ path = self._absolute_dir(state, arg)
+ try:
+ # accommodate unresolvable paths e.g. symlink loops
+ path = path.resolve()
+ except Exception:
+ # return the best we could do
+ pass
+ return path
+
+ @noKwargs
+ @FeatureNew('fs.expanduser', '0.54.0')
+ @typed_pos_args('fs.expanduser', str)
+ def expanduser(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
+ return str(Path(args[0]).expanduser())
+
+ @noKwargs
+ @FeatureNew('fs.is_absolute', '0.54.0')
+ @typed_pos_args('fs.is_absolute', (str, File))
+ def is_absolute(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
+ if isinstance(args[0], File):
+ FeatureNew('fs.is_absolute_file', '0.59.0').use(state.subproject)
+ return PurePath(str(args[0])).is_absolute()
+
+ @noKwargs
+ @FeatureNew('fs.as_posix', '0.54.0')
+ @typed_pos_args('fs.as_posix', str)
+ def as_posix(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
+ """
+ this function assumes a Windows path, even on a Unix-like system: every
+ '\' is treated as a separator and converted to '/', even if it was
+ meant as an escape character
+ """
+ return PureWindowsPath(args[0]).as_posix()
+
+ @noKwargs
+ @typed_pos_args('fs.exists', str)
+ def exists(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+ return self._resolve_dir(state, args[0]).exists()
+
+ @noKwargs
+ @typed_pos_args('fs.is_symlink', (str, File))
+ def is_symlink(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
+ if isinstance(args[0], File):
+ FeatureNew('fs.is_symlink_file', '0.59.0').use(state.subproject)
+ return self._absolute_dir(state, args[0]).is_symlink()
+
+ @noKwargs
+ @typed_pos_args('fs.is_file', str)
+ def is_file(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+ return self._resolve_dir(state, args[0]).is_file()
+
+ @noKwargs
+ @typed_pos_args('fs.is_dir', str)
+ def is_dir(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+ return self._resolve_dir(state, args[0]).is_dir()
+
+ @noKwargs
+ @typed_pos_args('fs.hash', (str, File), str)
+ def hash(self, state: 'ModuleState', args: T.Tuple['FileOrString', str], kwargs: T.Dict[str, T.Any]) -> str:
+ if isinstance(args[0], File):
+ FeatureNew('fs.hash_file', '0.59.0').use(state.subproject)
+ file = self._resolve_dir(state, args[0])
+ if not file.is_file():
+ raise MesonException(f'{file} is not a file and therefore cannot be hashed')
+ try:
+ h = hashlib.new(args[1])
+ except ValueError:
+ raise MesonException('hash algorithm {} is not available'.format(args[1]))
+ mlog.debug('computing {} sum of {} size {} bytes'.format(args[1], file, file.stat().st_size))
+ h.update(file.read_bytes())
+ return h.hexdigest()
+
+ @noKwargs
+ @typed_pos_args('fs.size', (str, File))
+ def size(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> int:
+ if isinstance(args[0], File):
+ FeatureNew('fs.size_file', '0.59.0').use(state.subproject)
+ file = self._resolve_dir(state, args[0])
+ if not file.is_file():
+ raise MesonException(f'{file} is not a file and therefore cannot be sized')
+ try:
+ return file.stat().st_size
+ except ValueError:
+ raise MesonException('{} size could not be determined'.format(args[0]))
+
+ @noKwargs
+ @typed_pos_args('fs.is_samepath', (str, File), (str, File))
+ def is_samepath(self, state: 'ModuleState', args: T.Tuple['FileOrString', 'FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
+ if isinstance(args[0], File) or isinstance(args[1], File):
+ FeatureNew('fs.is_samepath_file', '0.59.0').use(state.subproject)
+ file1 = self._resolve_dir(state, args[0])
+ file2 = self._resolve_dir(state, args[1])
+ if not file1.exists():
+ return False
+ if not file2.exists():
+ return False
+ try:
+ return file1.samefile(file2)
+ except OSError:
+ return False
+
+ @noKwargs
+ @typed_pos_args('fs.replace_suffix', (str, File), str)
+ def replace_suffix(self, state: 'ModuleState', args: T.Tuple['FileOrString', str], kwargs: T.Dict[str, T.Any]) -> str:
+ if isinstance(args[0], File):
+ FeatureNew('fs.replace_suffix_file', '0.59.0').use(state.subproject)
+ original = PurePath(str(args[0]))
+ new = original.with_suffix(args[1])
+ return str(new)
+
+ @noKwargs
+ @typed_pos_args('fs.parent', (str, File))
+ def parent(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str:
+ if isinstance(args[0], File):
+ FeatureNew('fs.parent_file', '0.59.0').use(state.subproject)
+ original = PurePath(str(args[0]))
+ new = original.parent
+ return str(new)
+
+ @noKwargs
+ @typed_pos_args('fs.name', (str, File))
+ def name(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str:
+ if isinstance(args[0], File):
+ FeatureNew('fs.name_file', '0.59.0').use(state.subproject)
+ original = PurePath(str(args[0]))
+ new = original.name
+ return str(new)
+
+ @noKwargs
+ @typed_pos_args('fs.stem', (str, File))
+ @FeatureNew('fs.stem', '0.54.0')
+ def stem(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str:
+ if isinstance(args[0], File):
+ FeatureNew('fs.stem_file', '0.59.0').use(state.subproject)
+ original = PurePath(str(args[0]))
+ new = original.stem
+ return str(new)
+
+ @FeatureNew('fs.read', '0.57.0')
+ @typed_pos_args('fs.read', (str, File))
+ @typed_kwargs('fs.read', KwargInfo('encoding', str, default='utf-8'))
+ def read(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: 'ReadKwArgs') -> str:
+ """Read a file from the source tree and return its value as a decoded
+ string.
+
+ If the encoding is not specified, the file is assumed to be utf-8
+ encoded. Paths should be relative (to prevent accidents) and may not
+ point into the build directory (to prevent build loops).
+ """
+ path = args[0]
+ encoding = kwargs['encoding']
+ src_dir = self.interpreter.environment.source_dir
+ sub_dir = self.interpreter.subdir
+ build_dir = self.interpreter.environment.get_build_dir()
+
+ if isinstance(path, File):
+ if path.is_built:
+ raise MesonException(
+ 'fs.read does not accept built files() objects')
+ path = os.path.join(src_dir, path.relative_name())
+ else:
+ if sub_dir:
+ src_dir = os.path.join(src_dir, sub_dir)
+ path = os.path.join(src_dir, path)
+
+ path = os.path.abspath(path)
+ if path_is_in_root(Path(path), Path(build_dir), resolve=True):
+ raise MesonException('path must not be in the build tree')
+ try:
+ with open(path, encoding=encoding) as f:
+ data = f.read()
+ except UnicodeDecodeError:
+ raise MesonException(f'decoding failed for {path}')
+ # Reconfigure when this file changes as it can contain data used by any
+ # part of the build configuration (e.g. `project(..., version:
+ # fs.read('VERSION'))` or `configure_file(...)`)
+ self.interpreter.add_build_def_file(path)
+ return data
+
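+ # Illustrative meson.build usage of read() above ('VERSION' is a
+ # placeholder file in the source tree):
+ #
+ # fs = import('fs')
+ # version_str = fs.read('VERSION').strip()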
+
+def initialize(*args: T.Any, **kwargs: T.Any) -> FSModule:
+ return FSModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/gnome.py b/meson/mesonbuild/modules/gnome.py
new file mode 100644
index 000000000..881e4240e
--- /dev/null
+++ b/meson/mesonbuild/modules/gnome.py
@@ -0,0 +1,1812 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''This module provides helper functions for Gnome/GLib related
+functionality such as gobject-introspection, gresources and gtk-doc'''
+
+import os
+import copy
+import subprocess
+import functools
+import typing as T
+
+from .. import build
+from .. import mlog
+from .. import mesonlib
+from .. import interpreter
+from . import GResourceTarget, GResourceHeaderTarget, GirTarget, TypelibTarget, VapiTarget
+from . import ExtensionModule
+from . import ModuleReturnValue
+from ..mesonlib import (
+ MachineChoice, MesonException, OrderedSet, Popen_safe, extract_as_list,
+ join_args, HoldableObject
+)
+from ..dependencies import Dependency, PkgConfigDependency, InternalDependency
+from ..interpreterbase import noPosargs, noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs, FeatureDeprecatedKwargs
+from ..interpreterbase import typed_kwargs, KwargInfo, ContainerTypeInfo
+from ..programs import ExternalProgram, OverrideProgram
+from ..build import CustomTarget, CustomTargetIndex, GeneratedList
+
+if T.TYPE_CHECKING:
+ from ..compilers import Compiler
+ from ..interpreter import Interpreter
+
+# gresource compilation is broken due to the way
+# the resource compiler and Ninja clash about it
+#
+# https://github.com/ninja-build/ninja/issues/1184
+# https://bugzilla.gnome.org/show_bug.cgi?id=774368
+gresource_dep_needed_version = '>= 2.51.1'
+
+native_glib_version = None
+
+class GnomeModule(ExtensionModule):
+ def __init__(self, interpreter: 'Interpreter') -> None:
+ super().__init__(interpreter)
+ self.gir_dep = None
+ self.install_glib_compile_schemas = False
+ self.install_gio_querymodules = []
+ self.install_gtk_update_icon_cache = False
+ self.install_update_desktop_database = False
+ self.devenv = None
+ self.methods.update({
+ 'post_install': self.post_install,
+ 'compile_resources': self.compile_resources,
+ 'generate_gir': self.generate_gir,
+ 'compile_schemas': self.compile_schemas,
+ 'yelp': self.yelp,
+ 'gtkdoc': self.gtkdoc,
+ 'gtkdoc_html_dir': self.gtkdoc_html_dir,
+ 'gdbus_codegen': self.gdbus_codegen,
+ 'mkenums': self.mkenums,
+ 'mkenums_simple': self.mkenums_simple,
+ 'genmarshal': self.genmarshal,
+ 'generate_vapi': self.generate_vapi,
+ })
+
+ @staticmethod
+ def _get_native_glib_version(state):
+ global native_glib_version
+ if native_glib_version is None:
+ glib_dep = PkgConfigDependency('glib-2.0', state.environment,
+ {'native': True, 'required': False})
+ if glib_dep.found():
+ native_glib_version = glib_dep.get_version()
+ else:
+ mlog.warning('Could not detect glib version, assuming 2.54. '
+ 'You may get build errors if your glib is older.')
+ native_glib_version = '2.54'
+ return native_glib_version
+
+ @mesonlib.run_once
+ def __print_gresources_warning(self, state):
+ if not mesonlib.version_compare(self._get_native_glib_version(state),
+ gresource_dep_needed_version):
+ mlog.warning('GLib compiled dependencies do not work reliably with \n'
+ 'the current version of GLib. See the following upstream issue:',
+ mlog.bold('https://bugzilla.gnome.org/show_bug.cgi?id=774368'))
+
+ @staticmethod
+ def _print_gdbus_warning():
+ mlog.warning('Code generated with gdbus_codegen() requires the root directory be added to\n'
+ ' include_directories of targets with GLib < 2.51.3:',
+ mlog.bold('https://github.com/mesonbuild/meson/issues/1387'),
+ once=True)
+
+ def _get_dep(self, state, depname, native=False, required=True):
+ kwargs = {'native': native, 'required': required}
+ return self.interpreter.func_dependency(state.current_node, [depname], kwargs)
+
+ def _get_native_binary(self, state, name, depname, varname, required=True):
+ # Look in overrides in case glib/gtk/etc are built as subproject
+ prog = self.interpreter.program_from_overrides([name], [])
+ if prog is not None:
+ return prog
+
+ # Look in machine file
+ prog = state.environment.lookup_binary_entry(MachineChoice.HOST, name)
+ if prog is not None:
+ return ExternalProgram.from_entry(name, prog)
+
+ # Check if pkgconfig has a variable
+ dep = self._get_dep(state, depname, native=True, required=False)
+ if dep.found() and dep.type_name == 'pkgconfig':
+ value = dep.get_pkgconfig_variable(varname, {})
+ if value:
+ return ExternalProgram(name, value)
+
+ # Normal program lookup
+ return state.find_program(name, required=required)
+
+ @typed_kwargs('gnome.post_install',
+ KwargInfo('glib_compile_schemas', bool, default=False),
+ KwargInfo('gio_querymodules', ContainerTypeInfo(list, str), default=[], listify=True),
+ KwargInfo('gtk_update_icon_cache', bool, default=False),
+ KwargInfo('update_desktop_database', bool, default=False, since='0.59.0'),
+ )
+ @noPosargs
+ @FeatureNew('gnome.post_install', '0.57.0')
+ def post_install(self, state, args, kwargs):
+ rv = []
+ datadir_abs = os.path.join(state.environment.get_prefix(), state.environment.get_datadir())
+ if kwargs['glib_compile_schemas'] and not self.install_glib_compile_schemas:
+ self.install_glib_compile_schemas = True
+ prog = self._get_native_binary(state, 'glib-compile-schemas', 'gio-2.0', 'glib_compile_schemas')
+ schemasdir = os.path.join(datadir_abs, 'glib-2.0', 'schemas')
+ script = state.backend.get_executable_serialisation([prog, schemasdir])
+ script.skip_if_destdir = True
+ rv.append(script)
+ for d in kwargs['gio_querymodules']:
+ if d not in self.install_gio_querymodules:
+ self.install_gio_querymodules.append(d)
+ prog = self._get_native_binary(state, 'gio-querymodules', 'gio-2.0', 'gio_querymodules')
+ moduledir = os.path.join(state.environment.get_prefix(), d)
+ script = state.backend.get_executable_serialisation([prog, moduledir])
+ script.skip_if_destdir = True
+ rv.append(script)
+ if kwargs['gtk_update_icon_cache'] and not self.install_gtk_update_icon_cache:
+ self.install_gtk_update_icon_cache = True
+ prog = self._get_native_binary(state, 'gtk4-update-icon-cache', 'gtk-4.0', 'gtk4_update_icon_cache', required=False)
+ found = isinstance(prog, build.Executable) or prog.found()
+ if not found:
+ prog = self._get_native_binary(state, 'gtk-update-icon-cache', 'gtk+-3.0', 'gtk_update_icon_cache')
+ icondir = os.path.join(datadir_abs, 'icons', 'hicolor')
+ script = state.backend.get_executable_serialisation([prog, '-q', '-t' ,'-f', icondir])
+ script.skip_if_destdir = True
+ rv.append(script)
+ if kwargs['update_desktop_database'] and not self.install_update_desktop_database:
+ self.install_update_desktop_database = True
+ prog = self._get_native_binary(state, 'update-desktop-database', 'desktop-file-utils', 'update_desktop_database')
+ appdir = os.path.join(datadir_abs, 'applications')
+ script = state.backend.get_executable_serialisation([prog, '-q', appdir])
+ script.skip_if_destdir = True
+ rv.append(script)
+ return ModuleReturnValue(None, rv)
+
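+ # Illustrative meson.build usage of post_install() above (the module
+ # paths are placeholders):
+ #
+ # gnome = import('gnome')
+ # gnome.post_install(glib_compile_schemas: true,
+ # gio_querymodules: ['lib/gio/modules'],
+ # gtk_update_icon_cache: true)
+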
+ @FeatureNewKwargs('gnome.compile_resources', '0.37.0', ['gresource_bundle', 'export', 'install_header'])
+ @permittedKwargs({'source_dir', 'c_name', 'dependencies', 'export', 'gresource_bundle', 'install_header',
+ 'install', 'install_dir', 'extra_args', 'build_by_default'})
+ def compile_resources(self, state, args, kwargs):
+ self.__print_gresources_warning(state)
+ glib_version = self._get_native_glib_version(state)
+
+ glib_compile_resources = state.find_program('glib-compile-resources')
+ cmd = [glib_compile_resources, '@INPUT@']
+
+ source_dirs, dependencies = [mesonlib.extract_as_list(kwargs, c, pop=True) for c in ['source_dir', 'dependencies']]
+
+ if len(args) < 2:
+ raise MesonException('Not enough arguments; the name of the resource '
+ 'and the path to the XML file are required')
+
+ # Validate dependencies
+ subdirs = []
+ depends = []
+ for (ii, dep) in enumerate(dependencies):
+ if isinstance(dep, mesonlib.File):
+ subdirs.append(dep.subdir)
+ elif isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
+ depends.append(dep)
+ subdirs.append(dep.get_subdir())
+ if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
+ m = 'The "dependencies" argument of gnome.compile_resources() can not\n' \
+ 'be used with the current version of glib-compile-resources due to\n' \
+ '<https://bugzilla.gnome.org/show_bug.cgi?id=774368>'
+ raise MesonException(m)
+ else:
+ m = 'Unexpected dependency type {!r} for gnome.compile_resources() ' \
+ '"dependencies" argument.\nPlease pass the return value of ' \
+ 'custom_target() or configure_file()'
+ raise MesonException(m.format(dep))
+
+ if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
+ ifile = args[1]
+ if isinstance(ifile, mesonlib.File):
+ # glib-compile-resources will be run inside the source dir,
+ # so we need either 'src_to_build' or the absolute path.
+ # Absolute path is the easiest choice.
+ if ifile.is_built:
+ ifile = os.path.join(state.environment.get_build_dir(), ifile.subdir, ifile.fname)
+ else:
+ ifile = os.path.join(ifile.subdir, ifile.fname)
+ elif isinstance(ifile, str):
+ ifile = os.path.join(state.subdir, ifile)
+ elif isinstance(ifile, (build.CustomTarget,
+ build.CustomTargetIndex,
+ build.GeneratedList)):
+ m = 'Resource xml files generated at build-time cannot be used ' \
+ 'with gnome.compile_resources() because we need to scan ' \
+ 'the xml for dependencies. Use configure_file() instead ' \
+ 'to generate it at configure-time.'
+ raise MesonException(m)
+ else:
+ raise MesonException(f'Invalid file argument: {ifile!r}')
+ depend_files, depends, subdirs = self._get_gresource_dependencies(
+ state, ifile, source_dirs, dependencies)
+
+ # Make source dirs relative to build dir now
+ source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs]
+ # Ensure build directories of generated deps are included
+ source_dirs += subdirs
+ # Always include current directory, but after paths set by user
+ source_dirs.append(os.path.join(state.build_to_src, state.subdir))
+
+ for source_dir in OrderedSet(source_dirs):
+ cmd += ['--sourcedir', source_dir]
+
+ if 'c_name' in kwargs:
+ cmd += ['--c-name', kwargs.pop('c_name')]
+ export = kwargs.pop('export', False)
+ if not export:
+ cmd += ['--internal']
+
+ cmd += ['--generate', '--target', '@OUTPUT@']
+
+ cmd += mesonlib.stringlistify(kwargs.pop('extra_args', []))
+
+ gresource = kwargs.pop('gresource_bundle', False)
+ if gresource:
+ output = args[0] + '.gresource'
+ name = args[0] + '_gresource'
+ else:
+ if 'c' in state.environment.coredata.compilers.host.keys():
+ output = args[0] + '.c'
+ name = args[0] + '_c'
+ elif 'cpp' in state.environment.coredata.compilers.host.keys():
+ output = args[0] + '.cpp'
+ name = args[0] + '_cpp'
+ else:
+ raise MesonException('Compiling GResources into code is only supported in C and C++ projects')
+
+ if kwargs.get('install', False) and not gresource:
+ raise MesonException('The install kwarg only applies to gresource bundles, see install_header')
+
+ install_header = kwargs.pop('install_header', False)
+ if install_header and gresource:
+ raise MesonException('The install_header kwarg does not apply to gresource bundles')
+ if install_header and not export:
+ raise MesonException('Cannot install the GResource header unless export is enabled')
+
+ kwargs['input'] = args[1]
+ kwargs['output'] = output
+ kwargs['depends'] = depends
+ if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
+ # This will eventually go out of sync if dependencies are added
+ kwargs['depend_files'] = depend_files
+ kwargs['command'] = cmd
+ else:
+ depfile = f'{output}.d'
+ kwargs['depfile'] = depfile
+ kwargs['command'] = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
+ target_c = GResourceTarget(name, state.subdir, state.subproject, kwargs)
+
+ if gresource: # Only one target for .gresource files
+ return ModuleReturnValue(target_c, [target_c])
+
+ h_kwargs = {
+ 'command': cmd,
+ 'input': args[1],
+ 'output': args[0] + '.h',
+ # The header doesn't actually use these dependencies, but the build errors out if they are missing
+ 'depends': depends
+ }
+ if 'build_by_default' in kwargs:
+ h_kwargs['build_by_default'] = kwargs['build_by_default']
+ if install_header:
+ h_kwargs['install'] = install_header
+ h_kwargs['install_dir'] = kwargs.get('install_dir',
+ state.environment.coredata.get_option(mesonlib.OptionKey('includedir')))
+ target_h = GResourceHeaderTarget(args[0] + '_h', state.subdir, state.subproject, h_kwargs)
+ rv = [target_c, target_h]
+ return ModuleReturnValue(rv, rv)
+
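+ # Illustrative meson.build usage of compile_resources() above (file and
+ # target names are placeholders):
+ #
+ # gnome = import('gnome')
+ # res = gnome.compile_resources('myapp-resources', 'myapp.gresource.xml',
+ # source_dir: 'data', c_name: 'myapp')
+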
+ def _get_gresource_dependencies(self, state, input_file, source_dirs, dependencies):
+
+ cmd = ['glib-compile-resources',
+ input_file,
+ '--generate-dependencies']
+
+ # Prefer generated files over source files
+ cmd += ['--sourcedir', state.subdir] # Current build dir
+ for source_dir in source_dirs:
+ cmd += ['--sourcedir', os.path.join(state.subdir, source_dir)]
+
+ try:
+ pc, stdout, stderr = Popen_safe(cmd, cwd=state.environment.get_source_dir())
+ except (FileNotFoundError, PermissionError):
+ raise MesonException('Could not execute glib-compile-resources.')
+ if pc.returncode != 0:
+ m = 'glib-compile-resources failed to get dependencies for {}:\n{}'
+ mlog.warning(m.format(cmd[1], stderr))
+ raise subprocess.CalledProcessError(pc.returncode, cmd)
+
+ dep_files = stdout.split('\n')[:-1]
+
+ depends = []
+ subdirs = []
+ for resfile in dep_files[:]:
+ resbasename = os.path.basename(resfile)
+ for dep in dependencies:
+ if isinstance(dep, mesonlib.File):
+ if dep.fname != resbasename:
+ continue
+ dep_files.remove(resfile)
+ dep_files.append(dep)
+ subdirs.append(dep.subdir)
+ break
+ elif isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
+ fname = None
+ outputs = {(o, os.path.basename(o)) for o in dep.get_outputs()}
+ for o, baseo in outputs:
+ if baseo == resbasename:
+ fname = o
+ break
+ if fname is not None:
+ dep_files.remove(resfile)
+ depends.append(dep)
+ subdirs.append(dep.get_subdir())
+ break
+ else:
+ # In generate-dependencies mode, glib-compile-resources doesn't raise
+ # an error for missing resources but instead prints whatever filename
+ # was listed in the input file. That's good because it means we can
+ # handle resource files that get generated as part of the build, as
+ # follows.
+ #
+ # If there are multiple generated resource files with the same basename
+ # then this code will get confused.
+ try:
+ f = mesonlib.File.from_source_file(state.environment.get_source_dir(),
+ ".", resfile)
+ except MesonException:
+ raise MesonException(
+ 'Resource "%s" listed in "%s" was not found. If this is a '
+ 'generated file, pass the target that generates it to '
+ 'gnome.compile_resources() using the "dependencies" '
+ 'keyword argument.' % (resfile, input_file))
+ dep_files.remove(resfile)
+ dep_files.append(f)
+ return dep_files, depends, subdirs
+
+ def _get_link_args(self, state, lib, depends, include_rpath=False,
+ use_gir_args=False):
+ link_command = []
+ # Construct link args
+ if isinstance(lib, build.SharedLibrary):
+ libdir = os.path.join(state.environment.get_build_dir(), state.backend.get_target_dir(lib))
+ link_command.append('-L' + libdir)
+ if include_rpath:
+ link_command.append('-Wl,-rpath,' + libdir)
+ depends.append(lib)
+ # Needed for the following binutils bug:
+ # https://github.com/mesonbuild/meson/issues/1911
+ # However, g-ir-scanner does not understand -Wl,-rpath
+ # so we need to use -L instead
+ for d in state.backend.determine_rpath_dirs(lib):
+ d = os.path.join(state.environment.get_build_dir(), d)
+ link_command.append('-L' + d)
+ if include_rpath:
+ link_command.append('-Wl,-rpath,' + d)
+ if use_gir_args and self._gir_has_option('--extra-library'):
+ link_command.append('--extra-library=' + lib.name)
+ else:
+ link_command.append('-l' + lib.name)
+ return link_command
+
+ def _get_dependencies_flags(self, deps, state, depends, include_rpath=False,
+ use_gir_args=False, separate_nodedup=False):
+ cflags = OrderedSet()
+ internal_ldflags = OrderedSet()
+ external_ldflags = OrderedSet()
+ # External linker flags that can't be de-duped reliably because they
+ # require two args in order, such as -framework AVFoundation
+ external_ldflags_nodedup = []
+ gi_includes = OrderedSet()
+ deps = mesonlib.listify(deps)
+
+ for dep in deps:
+ if isinstance(dep, Dependency):
+ girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
+ if girdir:
+ gi_includes.update([girdir])
+ if isinstance(dep, InternalDependency):
+ cflags.update(dep.get_compile_args())
+ cflags.update(state.get_include_args(dep.include_directories))
+ for lib in dep.libraries:
+ if isinstance(lib, build.SharedLibrary):
+ internal_ldflags.update(self._get_link_args(state, lib, depends, include_rpath))
+ libdepflags = self._get_dependencies_flags(lib.get_external_deps(), state, depends, include_rpath,
+ use_gir_args, True)
+ cflags.update(libdepflags[0])
+ internal_ldflags.update(libdepflags[1])
+ external_ldflags.update(libdepflags[2])
+ external_ldflags_nodedup += libdepflags[3]
+ gi_includes.update(libdepflags[4])
+ extdepflags = self._get_dependencies_flags(dep.ext_deps, state, depends, include_rpath,
+ use_gir_args, True)
+ cflags.update(extdepflags[0])
+ internal_ldflags.update(extdepflags[1])
+ external_ldflags.update(extdepflags[2])
+ external_ldflags_nodedup += extdepflags[3]
+ gi_includes.update(extdepflags[4])
+ for source in dep.sources:
+ if isinstance(source, GirTarget):
+ gi_includes.update([os.path.join(state.environment.get_build_dir(),
+ source.get_subdir())])
+ # This should be any dependency other than an internal one.
+ elif isinstance(dep, Dependency):
+ cflags.update(dep.get_compile_args())
+ ldflags = iter(dep.get_link_args(raw=True))
+ for lib in ldflags:
+ if (os.path.isabs(lib) and
+ # For PkgConfigDependency only:
+ getattr(dep, 'is_libtool', False)):
+ lib_dir = os.path.dirname(lib)
+ external_ldflags.update(["-L%s" % lib_dir])
+ if include_rpath:
+ external_ldflags.update([f'-Wl,-rpath {lib_dir}'])
+ libname = os.path.basename(lib)
+ if libname.startswith("lib"):
+ libname = libname[3:]
+ libname = libname.split(".so")[0]
+ lib = "-l%s" % libname
+ # FIXME: Hack to avoid passing some compiler options in
+ if lib.startswith("-W"):
+ continue
+ # If it's a framework arg, slurp the framework name too
+ # to preserve the order of arguments
+ if lib == '-framework':
+ external_ldflags_nodedup += [lib, next(ldflags)]
+ else:
+ external_ldflags.update([lib])
+ elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
+ cflags.update(state.get_include_args(dep.get_include_dirs()))
+ depends.append(dep)
+ else:
+ mlog.log(f'dependency {dep!r} not handled to build gir files')
+ continue
+
+ if use_gir_args and self._gir_has_option('--extra-library'):
+ def fix_ldflags(ldflags):
+ fixed_ldflags = OrderedSet()
+ for ldflag in ldflags:
+ if ldflag.startswith("-l"):
+ ldflag = ldflag.replace('-l', '--extra-library=', 1)
+ fixed_ldflags.add(ldflag)
+ return fixed_ldflags
+ internal_ldflags = fix_ldflags(internal_ldflags)
+ external_ldflags = fix_ldflags(external_ldflags)
+ if not separate_nodedup:
+ external_ldflags.update(external_ldflags_nodedup)
+ return cflags, internal_ldflags, external_ldflags, gi_includes
+ else:
+ return cflags, internal_ldflags, external_ldflags, external_ldflags_nodedup, gi_includes
+
+ def _unwrap_gir_target(self, girtarget, state):
+ if not isinstance(girtarget, (build.Executable, build.SharedLibrary,
+ build.StaticLibrary)):
+ raise MesonException(f'Gir target must be an executable or library but is "{girtarget}" of type {type(girtarget).__name__}')
+
+ STATIC_BUILD_REQUIRED_VERSION = ">=1.58.1"
+ if isinstance(girtarget, (build.StaticLibrary)) and \
+ not mesonlib.version_compare(
+ self._get_gir_dep(state)[0].get_version(),
+ STATIC_BUILD_REQUIRED_VERSION):
+ raise MesonException('Static libraries can only be introspected with GObject-Introspection ' + STATIC_BUILD_REQUIRED_VERSION)
+
+ return girtarget
+
+ def _devenv_append(self, varname: str, value: str) -> None:
+ if self.devenv is None:
+ self.devenv = build.EnvironmentVariables()
+ self.interpreter.build.devenv.append(self.devenv)
+ self.devenv.append(varname, [value])
+
+ def _get_gir_dep(self, state):
+ if not self.gir_dep:
+ self.gir_dep = self._get_dep(state, 'gobject-introspection-1.0')
+ self.giscanner = self._get_native_binary(state, 'g-ir-scanner', 'gobject-introspection-1.0', 'g_ir_scanner')
+ self.gicompiler = self._get_native_binary(state, 'g-ir-compiler', 'gobject-introspection-1.0', 'g_ir_compiler')
+ return self.gir_dep, self.giscanner, self.gicompiler
+
+ @functools.lru_cache(maxsize=None)
+ def _gir_has_option(self, option) -> bool:
+ exe = self.giscanner
+ if isinstance(exe, OverrideProgram):
+ # Handle overridden g-ir-scanner
+ assert option in ['--extra-library', '--sources-top-dirs']
+ return True
+ p, o, e = Popen_safe(exe.get_command() + ['--help'], stderr=subprocess.STDOUT)
+ return p.returncode == 0 and option in o
+
+ def _scan_header(self, kwargs):
+ ret = []
+ header = kwargs.pop('header', None)
+ if header:
+ if not isinstance(header, str):
+ raise MesonException('header must be a string')
+ ret = ['--c-include=' + header]
+ return ret
+
+ def _scan_extra_args(self, kwargs):
+ return mesonlib.stringlistify(kwargs.pop('extra_args', []))
+
+ def _scan_link_withs(self, state, depends, kwargs):
+ ret = []
+ if 'link_with' in kwargs:
+ link_with = mesonlib.extract_as_list(kwargs, 'link_with', pop = True)
+
+ for link in link_with:
+ ret += self._get_link_args(state, link, depends,
+ use_gir_args=True)
+ return ret
+
+ # May mutate depends and gir_inc_dirs
+ def _scan_include(self, state, depends, gir_inc_dirs, kwargs):
+ ret = []
+
+ if 'includes' in kwargs:
+ includes = mesonlib.extract_as_list(kwargs, 'includes', pop = True)
+ for inc in includes:
+ if isinstance(inc, str):
+ ret += [f'--include={inc}']
+ elif isinstance(inc, GirTarget):
+ gir_inc_dirs += [
+ os.path.join(state.environment.get_build_dir(),
+ inc.get_subdir()),
+ ]
+ ret += [
+ "--include-uninstalled={}".format(os.path.join(inc.get_subdir(), inc.get_basename()))
+ ]
+ depends += [inc]
+ else:
+ raise MesonException(
+ 'Gir includes must be str, GirTarget, or list of them. '
+ 'Got %s.' % type(inc).__name__)
+
+ return ret
+
+ def _scan_symbol_prefix(self, kwargs):
+ ret = []
+
+ if 'symbol_prefix' in kwargs:
+ sym_prefixes = mesonlib.stringlistify(kwargs.pop('symbol_prefix', []))
+ ret += ['--symbol-prefix=%s' % sym_prefix for sym_prefix in sym_prefixes]
+
+ return ret
+
+ def _scan_identifier_prefix(self, kwargs):
+ ret = []
+
+ if 'identifier_prefix' in kwargs:
+ identifier_prefix = kwargs.pop('identifier_prefix')
+ if not isinstance(identifier_prefix, str):
+ raise MesonException('Gir identifier prefix must be str')
+ ret += ['--identifier-prefix=%s' % identifier_prefix]
+
+ return ret
+
+ def _scan_export_packages(self, kwargs):
+ ret = []
+
+ if 'export_packages' in kwargs:
+ pkgs = kwargs.pop('export_packages')
+ if isinstance(pkgs, str):
+ ret += ['--pkg-export=%s' % pkgs]
+ elif isinstance(pkgs, list):
+ ret += ['--pkg-export=%s' % pkg for pkg in pkgs]
+ else:
+ raise MesonException('Gir export packages must be str or list')
+
+ return ret
+
+ def _scan_inc_dirs(self, kwargs):
+ ret = mesonlib.extract_as_list(kwargs, 'include_directories', pop = True)
+ for incd in ret:
+ if not isinstance(incd, (str, build.IncludeDirs)):
+ raise MesonException(
+ 'Gir include dirs should be include_directories().')
+ return ret
+
+ def _scan_langs(self, state, langs):
+ ret = []
+
+ for lang in langs:
+ link_args = state.environment.coredata.get_external_link_args(MachineChoice.HOST, lang)
+ for link_arg in link_args:
+ if link_arg.startswith('-L'):
+ ret.append(link_arg)
+
+ return ret
+
+ def _scan_gir_targets(self, state, girtargets):
+ ret = []
+
+ for girtarget in girtargets:
+ if isinstance(girtarget, build.Executable):
+ ret += ['--program', girtarget]
+ else:
+ # Because of https://gitlab.gnome.org/GNOME/gobject-introspection/merge_requests/72
+ # we can't use the full path until this is merged.
+ libpath = os.path.join(girtarget.get_subdir(), girtarget.get_filename())
+ # Must use absolute paths here because g-ir-scanner will not
+ # add them to the runtime path list if they're relative. This
+ # means we cannot use @BUILD_ROOT@
+ build_root = state.environment.get_build_dir()
+ if isinstance(girtarget, build.SharedLibrary):
+ # need to put our output directory first as we need to use the
+ # generated libraries instead of any possibly installed system/prefix
+ # ones.
+ ret += ["-L{}/{}".format(build_root, os.path.dirname(libpath))]
+ libname = girtarget.get_basename()
+ else:
+ libname = os.path.join(f"{build_root}/{libpath}")
+ ret += ['--library', libname]
+ # Needed for the following binutils bug:
+ # https://github.com/mesonbuild/meson/issues/1911
+ # However, g-ir-scanner does not understand -Wl,-rpath
+ # so we need to use -L instead
+ for d in state.backend.determine_rpath_dirs(girtarget):
+ d = os.path.join(state.environment.get_build_dir(), d)
+ ret.append('-L' + d)
+
+ return ret
+
+ def _get_girtargets_langs_compilers(self, girtargets: T.List[GirTarget]) -> T.List[T.Tuple[str, 'Compiler']]:
+ ret: T.List[T.Tuple[str, 'Compiler']] = []
+ for girtarget in girtargets:
+ for lang, compiler in girtarget.compilers.items():
+ # XXX: Can you use g-i with any other language?
+ if lang in ('c', 'cpp', 'objc', 'objcpp', 'd'):
+ ret.append((lang, compiler))
+ break
+
+ return ret
+
+ def _get_gir_targets_deps(self, girtargets):
+ ret = []
+ for girtarget in girtargets:
+ ret += girtarget.get_all_link_deps()
+ ret += girtarget.get_external_deps()
+ return ret
+
+ def _get_gir_targets_inc_dirs(self, girtargets):
+ ret = []
+ for girtarget in girtargets:
+ ret += girtarget.get_include_dirs()
+ return ret
+
+ def _get_langs_compilers_flags(self, state, langs_compilers: T.List[T.Tuple[str, 'Compiler']]):
+ cflags = []
+ internal_ldflags = []
+ external_ldflags = []
+
+ for lang, compiler in langs_compilers:
+ if state.global_args.get(lang):
+ cflags += state.global_args[lang]
+ if state.project_args.get(lang):
+ cflags += state.project_args[lang]
+ if mesonlib.OptionKey('b_sanitize') in compiler.base_options:
+ sanitize = state.environment.coredata.options[mesonlib.OptionKey('b_sanitize')].value
+ cflags += compiler.sanitizer_compile_args(sanitize)
+ sanitize = sanitize.split(',')
+ # These must be first in ldflags
+ if 'address' in sanitize:
+ internal_ldflags += ['-lasan']
+ if 'thread' in sanitize:
+ internal_ldflags += ['-ltsan']
+ if 'undefined' in sanitize:
+ internal_ldflags += ['-lubsan']
+ # FIXME: Linking directly to lib*san is not recommended but g-ir-scanner
+ # does not understand -f LDFLAGS. https://bugzilla.gnome.org/show_bug.cgi?id=783892
+ # ldflags += compiler.sanitizer_link_args(sanitize)
+
+ return cflags, internal_ldflags, external_ldflags
+
+ def _make_gir_filelist(self, state, srcdir, ns, nsversion, girtargets, libsources):
+ gir_filelist_dir = state.backend.get_target_private_dir_abs(girtargets[0])
+ if not os.path.isdir(gir_filelist_dir):
+ os.mkdir(gir_filelist_dir)
+ gir_filelist_filename = os.path.join(gir_filelist_dir, f'{ns}_{nsversion}_gir_filelist')
+
+ with open(gir_filelist_filename, 'w', encoding='utf-8') as gir_filelist:
+ for s in libsources:
+ if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
+ for custom_output in s.get_outputs():
+ gir_filelist.write(os.path.join(state.environment.get_build_dir(),
+ state.backend.get_target_dir(s),
+ custom_output) + '\n')
+ elif isinstance(s, mesonlib.File):
+ gir_filelist.write(s.rel_to_builddir(state.build_to_src) + '\n')
+ elif isinstance(s, build.GeneratedList):
+ for gen_src in s.get_outputs():
+ gir_filelist.write(os.path.join(srcdir, gen_src) + '\n')
+ else:
+ gir_filelist.write(os.path.join(srcdir, s) + '\n')
+
+ return gir_filelist_filename
+
+ def _make_gir_target(self, state, girfile, scan_command, generated_files, depends, kwargs):
+ scankwargs = {'input': generated_files,
+ 'output': girfile,
+ 'command': scan_command,
+ 'depends': depends}
+
+ if 'install' in kwargs:
+ scankwargs['install'] = kwargs['install']
+ scankwargs['install_dir'] = kwargs.get('install_dir_gir',
+ os.path.join(state.environment.get_datadir(), 'gir-1.0'))
+
+ if 'build_by_default' in kwargs:
+ scankwargs['build_by_default'] = kwargs['build_by_default']
+
+ return GirTarget(girfile, state.subdir, state.subproject, scankwargs)
+
+ def _make_typelib_target(self, state, typelib_output, typelib_cmd, generated_files, kwargs):
+ typelib_kwargs = {
+ 'input': generated_files,
+ 'output': typelib_output,
+ 'command': typelib_cmd,
+ }
+
+ if 'install' in kwargs:
+ typelib_kwargs['install'] = kwargs['install']
+ typelib_kwargs['install_dir'] = kwargs.get('install_dir_typelib',
+ os.path.join(state.environment.get_libdir(), 'girepository-1.0'))
+
+ if 'build_by_default' in kwargs:
+ typelib_kwargs['build_by_default'] = kwargs['build_by_default']
+
+ return TypelibTarget(typelib_output, state.subdir, state.subproject, typelib_kwargs)
+
+ # May mutate depends
+ def _gather_typelib_includes_and_update_depends(self, state, deps, depends):
+ # Need to recursively add deps on GirTarget sources from our
+ # dependencies and also find the include directories needed for the
+ # typelib generation custom target below.
+ typelib_includes = []
+ for dep in deps:
+ # Add a dependency on each GirTarget listed in dependencies and add
+ # the directory where it will be generated to the typelib includes
+ if isinstance(dep, InternalDependency):
+ for source in dep.sources:
+ if isinstance(source, GirTarget) and source not in depends:
+ depends.append(source)
+ subdir = os.path.join(state.environment.get_build_dir(),
+ source.get_subdir())
+ if subdir not in typelib_includes:
+ typelib_includes.append(subdir)
+ # Do the same, but for dependencies of dependencies. These are
+ # stored in the list of generated sources for each link dep (from
+ # girtarget.get_all_link_deps() above).
+ # FIXME: Store this in the original form from declare_dependency()
+ # so it can be used here directly.
+ elif isinstance(dep, build.SharedLibrary):
+ for source in dep.generated:
+ if isinstance(source, GirTarget):
+ subdir = os.path.join(state.environment.get_build_dir(),
+ source.get_subdir())
+ if subdir not in typelib_includes:
+ typelib_includes.append(subdir)
+ if isinstance(dep, Dependency):
+ girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
+ if girdir and girdir not in typelib_includes:
+ typelib_includes.append(girdir)
+ return typelib_includes
+
+ def _get_external_args_for_langs(self, state, langs):
+ ret = []
+ for lang in langs:
+ ret += state.environment.coredata.get_external_args(MachineChoice.HOST, lang)
+ return ret
+
+ @staticmethod
+ def _get_scanner_cflags(cflags):
+ 'g-ir-scanner only accepts -I/-D/-U; must ignore all other flags'
+ for f in cflags:
+ # _FORTIFY_SOURCE depends on / works together with -O, on the other hand this
+ # just invokes the preprocessor anyway
+ if f.startswith(('-D', '-U', '-I')) and not f.startswith('-D_FORTIFY_SOURCE'):
+ yield f
+
+ @staticmethod
+ def _get_scanner_ldflags(ldflags):
+ 'g-ir-scanner only accepts -L/-l; must ignore -F and other linker flags'
+ for f in ldflags:
+ if f.startswith(('-L', '-l', '--extra-library')):
+ yield f
+
+ @FeatureNewKwargs('generate_gir', '0.55.0', ['fatal_warnings'])
+ @FeatureNewKwargs('generate_gir', '0.40.0', ['build_by_default'])
+ @permittedKwargs({'sources', 'nsversion', 'namespace', 'symbol_prefix', 'identifier_prefix',
+ 'export_packages', 'includes', 'dependencies', 'link_with', 'include_directories',
+ 'install', 'install_dir_gir', 'install_dir_typelib', 'extra_args',
+ 'packages', 'header', 'build_by_default', 'fatal_warnings'})
+ def generate_gir(self, state, args, kwargs: T.Dict[str, T.Any]):
+ if not args:
+ raise MesonException('generate_gir takes at least one argument')
+ if kwargs.get('install_dir'):
+ raise MesonException('install_dir is not supported with generate_gir(), see "install_dir_gir" and "install_dir_typelib"')
+
+ girtargets = [self._unwrap_gir_target(arg, state) for arg in args]
+
+ if len(girtargets) > 1 and any([isinstance(el, build.Executable) for el in girtargets]):
+ raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable')
+
+ gir_dep, giscanner, gicompiler = self._get_gir_dep(state)
+
+ ns = kwargs.get('namespace')
+ if not ns:
+ raise MesonException('Missing "namespace" keyword argument')
+ nsversion = kwargs.get('nsversion')
+ if not nsversion:
+ raise MesonException('Missing "nsversion" keyword argument')
+ libsources = mesonlib.extract_as_list(kwargs, 'sources', pop=True)
+ girfile = f'{ns}-{nsversion}.gir'
+ srcdir = os.path.join(state.environment.get_source_dir(), state.subdir)
+ builddir = os.path.join(state.environment.get_build_dir(), state.subdir)
+ depends = gir_dep.sources + girtargets
+ gir_inc_dirs = []
+ langs_compilers = self._get_girtargets_langs_compilers(girtargets)
+ cflags, internal_ldflags, external_ldflags = self._get_langs_compilers_flags(state, langs_compilers)
+ deps = self._get_gir_targets_deps(girtargets)
+ deps += extract_as_list(kwargs, 'dependencies', pop=True)
+ deps += [gir_dep]
+ typelib_includes = self._gather_typelib_includes_and_update_depends(state, deps, depends)
+ # ldflags will be misinterpreted by gir scanner (showing
+ # spurious dependencies) but building GStreamer fails if they
+ # are not used here.
+ dep_cflags, dep_internal_ldflags, dep_external_ldflags, gi_includes = \
+ self._get_dependencies_flags(deps, state, depends, use_gir_args=True)
+ cflags += list(self._get_scanner_cflags(dep_cflags))
+ cflags += list(self._get_scanner_cflags(self._get_external_args_for_langs(state, [lc[0] for lc in langs_compilers])))
+ internal_ldflags += list(self._get_scanner_ldflags(dep_internal_ldflags))
+ external_ldflags += list(self._get_scanner_ldflags(dep_external_ldflags))
+ girtargets_inc_dirs = self._get_gir_targets_inc_dirs(girtargets)
+ inc_dirs = self._scan_inc_dirs(kwargs)
+
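+        # Assemble the g-ir-scanner command line. Compiler flags must be
+        # wrapped in --cflags-begin/--cflags-end so the scanner does not try
+        # to parse them as its own options.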
+ scan_command = [giscanner]
+ scan_command += ['--no-libtool']
+ scan_command += ['--namespace=' + ns, '--nsversion=' + nsversion]
+ scan_command += ['--warn-all']
+ scan_command += ['--output', '@OUTPUT@']
+ scan_command += self._scan_header(kwargs)
+ scan_command += self._scan_extra_args(kwargs)
+ scan_command += ['-I' + srcdir, '-I' + builddir]
+ scan_command += state.get_include_args(girtargets_inc_dirs)
+ scan_command += ['--filelist=' + self._make_gir_filelist(state, srcdir, ns, nsversion, girtargets, libsources)]
+ scan_command += self._scan_link_withs(state, depends, kwargs)
+ scan_command += self._scan_include(state, depends, gir_inc_dirs, kwargs)
+ scan_command += self._scan_symbol_prefix(kwargs)
+ scan_command += self._scan_identifier_prefix(kwargs)
+ scan_command += self._scan_export_packages(kwargs)
+ scan_command += ['--cflags-begin']
+ scan_command += cflags
+ scan_command += ['--cflags-end']
+ scan_command += state.get_include_args(inc_dirs)
+ scan_command += state.get_include_args(list(gi_includes) + gir_inc_dirs + inc_dirs, prefix='--add-include-path=')
+ scan_command += list(internal_ldflags)
+ scan_command += self._scan_gir_targets(state, girtargets)
+ scan_command += self._scan_langs(state, [lc[0] for lc in langs_compilers])
+ scan_command += list(external_ldflags)
+
+ if self._gir_has_option('--sources-top-dirs'):
+ scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_source_dir(), self.interpreter.subproject_dir, state.subproject)]
+ scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_build_dir(), self.interpreter.subproject_dir, state.subproject)]
+
+ if '--warn-error' in scan_command:
+ mlog.deprecation('Passing --warn-error is deprecated in favor of "fatal_warnings" keyword argument since v0.55')
+ fatal_warnings = kwargs.get('fatal_warnings', False)
+ if not isinstance(fatal_warnings, bool):
+ raise MesonException('fatal_warnings keyword argument must be a boolean')
+ if fatal_warnings:
+ scan_command.append('--warn-error')
+
+ generated_files = [f for f in libsources if isinstance(f, (GeneratedList, CustomTarget, CustomTargetIndex))]
+
+ scan_target = self._make_gir_target(state, girfile, scan_command, generated_files, depends, kwargs)
+
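+        # Second step: compile the generated .gir into a .typelib with
+        # g-ir-compiler.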
+ typelib_output = f'{ns}-{nsversion}.typelib'
+ typelib_cmd = [gicompiler, scan_target, '--output', '@OUTPUT@']
+ typelib_cmd += state.get_include_args(gir_inc_dirs, prefix='--includedir=')
+
+ for incdir in typelib_includes:
+ typelib_cmd += ["--includedir=" + incdir]
+
+ typelib_target = self._make_typelib_target(state, typelib_output, typelib_cmd, generated_files, kwargs)
+
+ self._devenv_append('GI_TYPELIB_PATH', os.path.join(state.environment.get_build_dir(), state.subdir))
+
+ rv = [scan_target, typelib_target]
+
+ return ModuleReturnValue(rv, rv)
+
+ @FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
+ @permittedKwargs({'build_by_default', 'depend_files'})
+ def compile_schemas(self, state, args, kwargs):
+ if args:
+ raise MesonException('Compile_schemas does not take positional arguments.')
+ srcdir = os.path.join(state.build_to_src, state.subdir)
+ outdir = state.subdir
+
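+        # glib-compile-schemas compiles every .gschema.xml in srcdir into a
+        # single gschemas.compiled in the build directory.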
+ cmd = [state.find_program('glib-compile-schemas')]
+ cmd += ['--targetdir', outdir, srcdir]
+ kwargs['command'] = cmd
+ kwargs['input'] = []
+ kwargs['output'] = 'gschemas.compiled'
+ if state.subdir == '':
+ targetname = 'gsettings-compile'
+ else:
+ targetname = 'gsettings-compile-' + state.subdir.replace('/', '_')
+ target_g = build.CustomTarget(targetname, state.subdir, state.subproject, kwargs)
+ self._devenv_append('GSETTINGS_SCHEMA_DIR', os.path.join(state.environment.get_build_dir(), state.subdir))
+ return ModuleReturnValue(target_g, [target_g])
+
+ @permittedKwargs({'sources', 'media', 'symlink_media', 'languages'})
+ @FeatureDeprecatedKwargs('gnome.yelp', '0.43.0', ['languages'],
+ 'Use a LINGUAS file in the source directory instead')
+ def yelp(self, state, args, kwargs):
+ if len(args) < 1:
+ raise MesonException('Yelp requires a project id')
+
+ project_id = args[0]
+ sources = mesonlib.stringlistify(kwargs.pop('sources', []))
+ if not sources:
+ if len(args) > 1:
+ sources = mesonlib.stringlistify(args[1:])
+ if not sources:
+ raise MesonException('Yelp requires a list of sources')
+ source_str = '@@'.join(sources)
+
+ langs = mesonlib.stringlistify(kwargs.pop('languages', []))
+ media = mesonlib.stringlistify(kwargs.pop('media', []))
+ symlinks = kwargs.pop('symlink_media', True)
+
+ if not isinstance(symlinks, bool):
+ raise MesonException('symlink_media must be a boolean')
+
+ if kwargs:
+ raise MesonException('Unknown arguments passed: {}'.format(', '.join(kwargs.keys())))
+
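+        # All the actual work (installation, pot extraction, po updates) is
+        # delegated to Meson's internal 'yelphelper' script.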
+ script = state.environment.get_build_command()
+ args = ['--internal',
+ 'yelphelper',
+ 'install',
+ '--subdir=' + state.subdir,
+ '--id=' + project_id,
+ '--installdir=' + os.path.join(state.environment.get_datadir(), 'help'),
+ '--sources=' + source_str]
+ if symlinks:
+ args.append('--symlinks=true')
+ if media:
+ args.append('--media=' + '@@'.join(media))
+ if langs:
+ args.append('--langs=' + '@@'.join(langs))
+ inscript = state.backend.get_executable_serialisation(script + args)
+
+ potargs = state.environment.get_build_command() + [
+ '--internal', 'yelphelper', 'pot',
+ '--subdir=' + state.subdir,
+ '--id=' + project_id,
+ '--sources=' + source_str,
+ ]
+ pottarget = build.RunTarget('help-' + project_id + '-pot', potargs,
+ [], state.subdir, state.subproject)
+
+ poargs = state.environment.get_build_command() + [
+ '--internal', 'yelphelper', 'update-po',
+ '--subdir=' + state.subdir,
+ '--id=' + project_id,
+ '--sources=' + source_str,
+ '--langs=' + '@@'.join(langs),
+ ]
+ potarget = build.RunTarget('help-' + project_id + '-update-po', poargs,
+ [], state.subdir, state.subproject)
+
+ rv = [inscript, pottarget, potarget]
+ return ModuleReturnValue(None, rv)
+
+ @FeatureNewKwargs('gnome.gtkdoc', '0.52.0', ['check'])
+ @FeatureNewKwargs('gnome.gtkdoc', '0.48.0', ['c_args'])
+ @FeatureNewKwargs('gnome.gtkdoc', '0.48.0', ['module_version'])
+ @FeatureNewKwargs('gnome.gtkdoc', '0.37.0', ['namespace', 'mode'])
+ @permittedKwargs({'main_xml', 'main_sgml', 'src_dir', 'dependencies', 'install',
+ 'install_dir', 'scan_args', 'scanobjs_args', 'gobject_typesfile',
+ 'fixxref_args', 'html_args', 'html_assets', 'content_files',
+ 'mkdb_args', 'ignore_headers', 'include_directories',
+ 'namespace', 'mode', 'expand_content_files', 'module_version',
+ 'c_args', 'check'})
+ def gtkdoc(self, state, args, kwargs):
+ if len(args) != 1:
+ raise MesonException('Gtkdoc must have one positional argument.')
+ modulename = args[0]
+ if not isinstance(modulename, str):
+            raise MesonException('Gtkdoc argument must be a string.')
+ if 'src_dir' not in kwargs:
+ raise MesonException('Keyword argument src_dir missing.')
+ main_file = kwargs.get('main_sgml', '')
+ if not isinstance(main_file, str):
+ raise MesonException('Main sgml keyword argument must be a string.')
+ main_xml = kwargs.get('main_xml', '')
+ if not isinstance(main_xml, str):
+ raise MesonException('Main xml keyword argument must be a string.')
+ moduleversion = kwargs.get('module_version', '')
+ if not isinstance(moduleversion, str):
+ raise MesonException('Module version keyword argument must be a string.')
+ if main_xml != '':
+ if main_file != '':
+ raise MesonException('You can only specify main_xml or main_sgml, not both.')
+ main_file = main_xml
+ targetname = modulename + ('-' + moduleversion if moduleversion else '') + '-doc'
+ command = state.environment.get_build_command()
+
+ namespace = kwargs.get('namespace', '')
+ mode = kwargs.get('mode', 'auto')
+ VALID_MODES = ('xml', 'sgml', 'none', 'auto')
+ if mode not in VALID_MODES:
+ raise MesonException(f'gtkdoc: Mode {mode} is not a valid mode: {VALID_MODES}')
+
+ src_dirs = mesonlib.extract_as_list(kwargs, 'src_dir')
+ header_dirs = []
+ for src_dir in src_dirs:
+ if isinstance(src_dir, HoldableObject):
+ if not isinstance(src_dir, build.IncludeDirs):
+ raise MesonException('Invalid keyword argument for src_dir.')
+ for inc_dir in src_dir.get_incdirs():
+ header_dirs.append(os.path.join(state.environment.get_source_dir(),
+ src_dir.get_curdir(), inc_dir))
+ header_dirs.append(os.path.join(state.environment.get_build_dir(),
+ src_dir.get_curdir(), inc_dir))
+ else:
+ header_dirs.append(src_dir)
+
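+        # Build the argument list for Meson's internal 'gtkdoc' helper, which
+        # drives the individual gtkdoc-* tools located below.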
+ args = ['--internal', 'gtkdoc',
+ '--sourcedir=' + state.environment.get_source_dir(),
+ '--builddir=' + state.environment.get_build_dir(),
+ '--subdir=' + state.subdir,
+ '--headerdirs=' + '@@'.join(header_dirs),
+ '--mainfile=' + main_file,
+ '--modulename=' + modulename,
+ '--moduleversion=' + moduleversion,
+ '--mode=' + mode]
+ for tool in ['scan', 'scangobj', 'mkdb', 'mkhtml', 'fixxref']:
+ program_name = 'gtkdoc-' + tool
+ program = state.find_program(program_name)
+ path = program.get_path()
+ args.append(f'--{program_name}={path}')
+ if namespace:
+ args.append('--namespace=' + namespace)
+ args += self._unpack_args('--htmlargs=', 'html_args', kwargs)
+ args += self._unpack_args('--scanargs=', 'scan_args', kwargs)
+ args += self._unpack_args('--scanobjsargs=', 'scanobjs_args', kwargs)
+ args += self._unpack_args('--gobjects-types-file=', 'gobject_typesfile', kwargs, state)
+ args += self._unpack_args('--fixxrefargs=', 'fixxref_args', kwargs)
+ args += self._unpack_args('--mkdbargs=', 'mkdb_args', kwargs)
+ args += self._unpack_args('--html-assets=', 'html_assets', kwargs, state)
+
+ depends = []
+ content_files = []
+ for s in mesonlib.extract_as_list(kwargs, 'content_files'):
+ if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
+ depends.append(s)
+ for o in s.get_outputs():
+ content_files.append(os.path.join(state.environment.get_build_dir(),
+ state.backend.get_target_dir(s),
+ o))
+ elif isinstance(s, mesonlib.File):
+ content_files.append(s.absolute_path(state.environment.get_source_dir(),
+ state.environment.get_build_dir()))
+ elif isinstance(s, build.GeneratedList):
+ depends.append(s)
+ for gen_src in s.get_outputs():
+ content_files.append(os.path.join(state.environment.get_source_dir(),
+ state.subdir,
+ gen_src))
+ elif isinstance(s, str):
+ content_files.append(os.path.join(state.environment.get_source_dir(),
+ state.subdir,
+ s))
+ else:
+ raise MesonException(
+ f'Invalid object type: {s.__class__.__name__!r}')
+ args += ['--content-files=' + '@@'.join(content_files)]
+
+ args += self._unpack_args('--expand-content-files=', 'expand_content_files', kwargs, state)
+ args += self._unpack_args('--ignore-headers=', 'ignore_headers', kwargs)
+ args += self._unpack_args('--installdir=', 'install_dir', kwargs)
+ args += self._get_build_args(kwargs, state, depends)
+ custom_kwargs = {'output': modulename + '-decl.txt',
+ 'command': command + args,
+ 'depends': depends,
+ 'build_always_stale': True,
+ }
+ custom_target = build.CustomTarget(targetname, state.subdir, state.subproject, custom_kwargs)
+ alias_target = build.AliasTarget(targetname, [custom_target], state.subdir, state.subproject)
+ if kwargs.get('check', False):
+ check_cmd = state.find_program('gtkdoc-check')
+ check_env = ['DOC_MODULE=' + modulename,
+ 'DOC_MAIN_SGML_FILE=' + main_file]
+ check_args = [targetname + '-check', check_cmd]
+ check_workdir = os.path.join(state.environment.get_build_dir(), state.subdir)
+ state.test(check_args, env=check_env, workdir=check_workdir, depends=custom_target)
+ res = [custom_target, alias_target]
+ if kwargs.get('install', True):
+ res.append(state.backend.get_executable_serialisation(command + args))
+ return ModuleReturnValue(custom_target, res)
+
+ def _get_build_args(self, kwargs, state, depends):
+ args = []
+ deps = extract_as_list(kwargs, 'dependencies')
+ cflags = []
+ cflags.extend(mesonlib.stringlistify(kwargs.pop('c_args', [])))
+ deps_cflags, internal_ldflags, external_ldflags, gi_includes = \
+ self._get_dependencies_flags(deps, state, depends, include_rpath=True)
+ inc_dirs = mesonlib.extract_as_list(kwargs, 'include_directories')
+ for incd in inc_dirs:
+ if not isinstance(incd, (str, build.IncludeDirs)):
+ raise MesonException(
+ 'Gir include dirs should be include_directories().')
+
+ cflags.extend(deps_cflags)
+ cflags.extend(state.get_include_args(inc_dirs))
+ ldflags = []
+ ldflags.extend(internal_ldflags)
+ ldflags.extend(external_ldflags)
+
+ cflags.extend(state.environment.coredata.get_external_args(MachineChoice.HOST, 'c'))
+ ldflags.extend(state.environment.coredata.get_external_link_args(MachineChoice.HOST, 'c'))
+ compiler = state.environment.coredata.compilers[MachineChoice.HOST]['c']
+
+ compiler_flags = self._get_langs_compilers_flags(state, [('c', compiler)])
+ cflags.extend(compiler_flags[0])
+ ldflags.extend(compiler_flags[1])
+ ldflags.extend(compiler_flags[2])
+ if compiler:
+ args += ['--cc=%s' % join_args(compiler.get_exelist())]
+ args += ['--ld=%s' % join_args(compiler.get_linker_exelist())]
+ if cflags:
+ args += ['--cflags=%s' % join_args(cflags)]
+ if ldflags:
+ args += ['--ldflags=%s' % join_args(ldflags)]
+
+ return args
+
+ @noKwargs
+ def gtkdoc_html_dir(self, state, args, kwargs):
+ if len(args) != 1:
+ raise MesonException('Must have exactly one argument.')
+ modulename = args[0]
+ if not isinstance(modulename, str):
+ raise MesonException('Argument must be a string')
+ return os.path.join('share/gtk-doc/html', modulename)
+
+ @staticmethod
+    def _unpack_args(arg, kwarg_name, kwargs, expand_file_state=None):
+        if kwarg_name not in kwargs:
+            return []
+
+        new_args = mesonlib.extract_as_list(kwargs, kwarg_name)
+        args = []
+        for i in new_args:
+            if expand_file_state and isinstance(i, mesonlib.File):
+                i = i.absolute_path(expand_file_state.environment.get_source_dir(), expand_file_state.environment.get_build_dir())
+            elif expand_file_state and isinstance(i, str):
+                i = os.path.join(expand_file_state.environment.get_source_dir(), expand_file_state.subdir, i)
+ elif not isinstance(i, str):
+ raise MesonException(kwarg_name + ' values must be strings.')
+ args.append(i)
+
+ if args:
+ return [arg + '@@'.join(args)]
+
+ return []
+
+ def _get_autocleanup_args(self, kwargs, glib_version):
+ if not mesonlib.version_compare(glib_version, '>= 2.49.1'):
+ # Warn if requested, silently disable if not
+ if 'autocleanup' in kwargs:
+ mlog.warning('Glib version ({}) is too old to support the \'autocleanup\' '
+ 'kwarg, need 2.49.1 or newer'.format(glib_version))
+ return []
+ autocleanup = kwargs.pop('autocleanup', 'all')
+ values = ('none', 'objects', 'all')
+ if autocleanup not in values:
+ raise MesonException('gdbus_codegen does not support {!r} as an autocleanup value, '
+ 'must be one of: {!r}'.format(autocleanup, ', '.join(values)))
+ return ['--c-generate-autocleanup', autocleanup]
+
+ @FeatureNewKwargs('build target', '0.46.0', ['install_header', 'install_dir', 'sources'])
+ @FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
+ @FeatureNewKwargs('build target', '0.47.0', ['extra_args', 'autocleanup'])
+ @permittedKwargs({'interface_prefix', 'namespace', 'extra_args', 'autocleanup', 'object_manager', 'build_by_default',
+ 'annotations', 'docbook', 'install_header', 'install_dir', 'sources'})
+ def gdbus_codegen(self, state, args, kwargs):
+ if len(args) not in (1, 2):
+            raise MesonException('gdbus_codegen takes one or two arguments, name and xml file.')
+ namebase = args[0]
+ xml_files = args[1:]
+ cmd = [state.find_program('gdbus-codegen')]
+ extra_args = mesonlib.stringlistify(kwargs.pop('extra_args', []))
+ cmd += extra_args
+ # Autocleanup supported?
+ glib_version = self._get_native_glib_version(state)
+ cmd += self._get_autocleanup_args(kwargs, glib_version)
+ if 'interface_prefix' in kwargs:
+ cmd += ['--interface-prefix', kwargs.pop('interface_prefix')]
+ if 'namespace' in kwargs:
+ cmd += ['--c-namespace', kwargs.pop('namespace')]
+ if kwargs.get('object_manager', False):
+ cmd += ['--c-generate-object-manager']
+ if 'sources' in kwargs:
+ xml_files += mesonlib.listify(kwargs.pop('sources'))
+ build_by_default = kwargs.get('build_by_default', False)
+
+ # Annotations are a bit ugly in that they are a list of lists of strings...
+ annotations = kwargs.pop('annotations', [])
+ if not isinstance(annotations, list):
+ raise MesonException('annotations takes a list')
+        if annotations and not isinstance(annotations[0], list):
+ annotations = [annotations]
+
+ for annotation in annotations:
+ if len(annotation) != 3 or not all(isinstance(i, str) for i in annotation):
+ raise MesonException('Annotations must be made up of 3 strings for ELEMENT, KEY, and VALUE')
+ cmd += ['--annotate'] + annotation
+
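+        # Up to three custom targets are created below: the C body, the
+        # header, and (optionally) the DocBook documentation.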
+ targets = []
+ install_header = kwargs.get('install_header', False)
+ install_dir = kwargs.get('install_dir', state.environment.coredata.get_option(mesonlib.OptionKey('includedir')))
+
+ output = namebase + '.c'
+ # Added in https://gitlab.gnome.org/GNOME/glib/commit/e4d68c7b3e8b01ab1a4231bf6da21d045cb5a816 (2.55.2)
+ # Fixed in https://gitlab.gnome.org/GNOME/glib/commit/cd1f82d8fc741a2203582c12cc21b4dacf7e1872 (2.56.2)
+ if mesonlib.version_compare(glib_version, '>= 2.56.2'):
+ custom_kwargs = {'input': xml_files,
+ 'output': output,
+ 'command': cmd + ['--body', '--output', '@OUTPUT@', '@INPUT@'],
+ 'build_by_default': build_by_default
+ }
+ else:
+ if 'docbook' in kwargs:
+ docbook = kwargs['docbook']
+ if not isinstance(docbook, str):
+ raise MesonException('docbook value must be a string.')
+
+ cmd += ['--generate-docbook', docbook]
+
+ # https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a
+ if mesonlib.version_compare(glib_version, '>= 2.51.3'):
+ cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@']
+ else:
+ self._print_gdbus_warning()
+ cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@']
+
+ custom_kwargs = {'input': xml_files,
+ 'output': output,
+ 'command': cmd,
+ 'build_by_default': build_by_default
+ }
+
+ cfile_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs)
+ targets.append(cfile_custom_target)
+
+ output = namebase + '.h'
+ if mesonlib.version_compare(glib_version, '>= 2.56.2'):
+ custom_kwargs = {'input': xml_files,
+ 'output': output,
+ 'command': cmd + ['--header', '--output', '@OUTPUT@', '@INPUT@'],
+ 'build_by_default': build_by_default,
+ 'install': install_header,
+ 'install_dir': install_dir
+ }
+ else:
+ custom_kwargs = {'input': xml_files,
+ 'output': output,
+ 'command': cmd,
+ 'build_by_default': build_by_default,
+ 'install': install_header,
+ 'install_dir': install_dir,
+ 'depends': cfile_custom_target
+ }
+
+ hfile_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs)
+ targets.append(hfile_custom_target)
+
+ if 'docbook' in kwargs:
+ docbook = kwargs['docbook']
+ if not isinstance(docbook, str):
+ raise MesonException('docbook value must be a string.')
+
+ docbook_cmd = cmd + ['--output-directory', '@OUTDIR@', '--generate-docbook', docbook, '@INPUT@']
+
+ # The docbook output is always ${docbook}-${name_of_xml_file}
+ output = namebase + '-docbook'
+ outputs = []
+ for f in xml_files:
+ outputs.append('{}-{}'.format(docbook, os.path.basename(str(f))))
+
+ if mesonlib.version_compare(glib_version, '>= 2.56.2'):
+ custom_kwargs = {'input': xml_files,
+ 'output': outputs,
+ 'command': docbook_cmd,
+ 'build_by_default': build_by_default
+ }
+ else:
+ custom_kwargs = {'input': xml_files,
+ 'output': outputs,
+ 'command': cmd,
+ 'build_by_default': build_by_default,
+ 'depends': cfile_custom_target
+ }
+
+ docbook_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs)
+ targets.append(docbook_custom_target)
+
+ return ModuleReturnValue(targets, targets)
+
+ @permittedKwargs({'sources', 'c_template', 'h_template', 'install_header', 'install_dir',
+ 'comments', 'identifier_prefix', 'symbol_prefix', 'eprod', 'vprod',
+ 'fhead', 'fprod', 'ftail', 'vhead', 'vtail', 'depends'})
+ def mkenums(self, state, args, kwargs):
+ if len(args) != 1:
+ raise MesonException('Mkenums requires one positional argument.')
+ basename = args[0]
+
+ if 'sources' not in kwargs:
+ raise MesonException('Missing keyword argument "sources".')
+ sources = kwargs.pop('sources')
+ if isinstance(sources, str):
+ sources = [sources]
+ elif not isinstance(sources, list):
+ raise MesonException(
+ 'Sources keyword argument must be a string or array.')
+
+ cmd = []
+ known_kwargs = ['comments', 'eprod', 'fhead', 'fprod', 'ftail',
+ 'identifier_prefix', 'symbol_prefix', 'template',
+ 'vhead', 'vprod', 'vtail']
+ known_custom_target_kwargs = ['install_dir', 'build_always',
+ 'depends', 'depend_files']
+ c_template = h_template = None
+ install_header = False
+ for arg, value in kwargs.items():
+ if arg == 'sources':
+ raise AssertionError("sources should've already been handled")
+ elif arg == 'c_template':
+ c_template = value
+ if isinstance(c_template, mesonlib.File):
+ c_template = c_template.absolute_path(state.environment.source_dir, state.environment.build_dir)
+ if 'template' in kwargs:
+ raise MesonException('Mkenums does not accept both '
+ 'c_template and template keyword '
+ 'arguments at the same time.')
+ elif arg == 'h_template':
+ h_template = value
+ if isinstance(h_template, mesonlib.File):
+ h_template = h_template.absolute_path(state.environment.source_dir, state.environment.build_dir)
+ if 'template' in kwargs:
+ raise MesonException('Mkenums does not accept both '
+ 'h_template and template keyword '
+ 'arguments at the same time.')
+ elif arg == 'install_header':
+ install_header = value
+ elif arg in known_kwargs:
+ cmd += ['--' + arg.replace('_', '-'), value]
+ elif arg not in known_custom_target_kwargs:
+ raise MesonException(
+ f'Mkenums does not take a {arg} keyword argument.')
+ cmd = [state.find_program(['glib-mkenums', 'mkenums'])] + cmd
+ custom_kwargs = {}
+ for arg in known_custom_target_kwargs:
+ if arg in kwargs:
+ custom_kwargs[arg] = kwargs[arg]
+
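+        # Which targets are created depends on the templates: h_template and
+        # c_template each produce their own custom target; with neither, a
+        # single target named after the positional argument is generated.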
+ targets = []
+
+ if h_template is not None:
+ h_output = os.path.basename(os.path.splitext(h_template)[0])
+ # We always set template as the first element in the source array
+ # so --template consumes it.
+ h_cmd = cmd + ['--template', '@INPUT@']
+ h_sources = [h_template] + sources
+ custom_kwargs['install'] = install_header
+ if 'install_dir' not in custom_kwargs:
+ custom_kwargs['install_dir'] = \
+ state.environment.coredata.get_option(mesonlib.OptionKey('includedir'))
+ h_target = self._make_mkenum_custom_target(state, h_sources,
+ h_output, h_cmd,
+ custom_kwargs)
+ targets.append(h_target)
+
+ if c_template is not None:
+ c_output = os.path.basename(os.path.splitext(c_template)[0])
+ # We always set template as the first element in the source array
+ # so --template consumes it.
+ c_cmd = cmd + ['--template', '@INPUT@']
+ c_sources = [c_template] + sources
+ # Never install the C file. Complain on bug tracker if you need it.
+ custom_kwargs['install'] = False
+ if h_template is not None:
+ if 'depends' in custom_kwargs:
+ custom_kwargs['depends'] += [h_target]
+ else:
+ custom_kwargs['depends'] = h_target
+ c_target = self._make_mkenum_custom_target(state, c_sources,
+ c_output, c_cmd,
+ custom_kwargs)
+ targets.insert(0, c_target)
+
+ if c_template is None and h_template is None:
+ generic_cmd = cmd + ['@INPUT@']
+ custom_kwargs['install'] = install_header
+ if 'install_dir' not in custom_kwargs:
+ custom_kwargs['install_dir'] = \
+ state.environment.coredata.get_option(mesonlib.OptionKey('includedir'))
+ target = self._make_mkenum_custom_target(state, sources, basename,
+ generic_cmd, custom_kwargs)
+ return ModuleReturnValue(target, [target])
+ elif len(targets) == 1:
+ return ModuleReturnValue(targets[0], [targets[0]])
+ else:
+ return ModuleReturnValue(targets, targets)
+
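+    # Illustrative meson.build usage of mkenums_simple() (names hypothetical):
+    #   enums = gnome.mkenums_simple('foo-enums', sources: ['foo.h'])
+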
+ @FeatureNew('gnome.mkenums_simple', '0.42.0')
+ def mkenums_simple(self, state, args, kwargs):
+ hdr_filename = args[0] + '.h'
+ body_filename = args[0] + '.c'
+
+ # not really needed, just for sanity checking
+ forbidden_kwargs = ['c_template', 'h_template', 'eprod', 'fhead',
+ 'fprod', 'ftail', 'vhead', 'vtail', 'comments']
+ for arg in forbidden_kwargs:
+ if arg in kwargs:
+ raise MesonException(f'mkenums_simple() does not take a {arg} keyword argument')
+
+ # kwargs to pass as-is from mkenums_simple() to mkenums()
+ shared_kwargs = ['sources', 'install_header', 'install_dir',
+ 'identifier_prefix', 'symbol_prefix']
+ mkenums_kwargs = {}
+ for arg in shared_kwargs:
+ if arg in kwargs:
+ mkenums_kwargs[arg] = kwargs[arg]
+
+ # .c file generation
+ c_file_kwargs = copy.deepcopy(mkenums_kwargs)
+ if 'sources' not in kwargs:
+ raise MesonException('Missing keyword argument "sources".')
+ sources = kwargs['sources']
+ if isinstance(sources, str):
+ sources = [sources]
+ elif not isinstance(sources, list):
+ raise MesonException(
+ 'Sources keyword argument must be a string or array.')
+
+ # The `install_header` argument will be used by mkenums() when
+ # not using template files, so we need to forcibly unset it
+ # when generating the C source file, otherwise we will end up
+ # installing it
+ c_file_kwargs['install_header'] = False
+
+ header_prefix = kwargs.get('header_prefix', '')
+ decl_decorator = kwargs.get('decorator', '')
+ func_prefix = kwargs.get('function_prefix', '')
+ body_prefix = kwargs.get('body_prefix', '')
+
+ # Maybe we should write our own template files into the build dir
+ # instead, but that seems like much more work, nice as it would be.
+ fhead = ''
+ if body_prefix != '':
+ fhead += '%s\n' % body_prefix
+ fhead += '#include "%s"\n' % hdr_filename
+ for hdr in sources:
+ fhead += '#include "%s"\n' % os.path.basename(str(hdr))
+ fhead += '''
+#define C_ENUM(v) ((gint) v)
+#define C_FLAGS(v) ((guint) v)
+'''
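+        # C_ENUM/C_FLAGS cast the enum members to gint/guint in the
+        # GEnumValue/GFlagsValue tables emitted via vhead/vprod/vtail below.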
+ c_file_kwargs['fhead'] = fhead
+
+ c_file_kwargs['fprod'] = '''
+/* enumerations from "@basename@" */
+'''
+
+ c_file_kwargs['vhead'] = '''
+GType
+%s@enum_name@_get_type (void)
+{
+ static gsize gtype_id = 0;
+ static const G@Type@Value values[] = {''' % func_prefix
+
+ c_file_kwargs['vprod'] = ' { C_@TYPE@(@VALUENAME@), "@VALUENAME@", "@valuenick@" },'
+
+ c_file_kwargs['vtail'] = ''' { 0, NULL, NULL }
+ };
+ if (g_once_init_enter (&gtype_id)) {
+ GType new_type = g_@type@_register_static (g_intern_static_string ("@EnumName@"), values);
+ g_once_init_leave (&gtype_id, new_type);
+ }
+ return (GType) gtype_id;
+}'''
+
+ rv = self.mkenums(state, [body_filename], c_file_kwargs)
+ c_file = rv.return_value
+
+ # .h file generation
+ h_file_kwargs = copy.deepcopy(mkenums_kwargs)
+
+ h_file_kwargs['fhead'] = '''#pragma once
+
+#include <glib-object.h>
+{}
+
+G_BEGIN_DECLS
+'''.format(header_prefix)
+
+ h_file_kwargs['fprod'] = '''
+/* enumerations from "@basename@" */
+'''
+
+ h_file_kwargs['vhead'] = '''
+{}
+GType {}@enum_name@_get_type (void);
+#define @ENUMPREFIX@_TYPE_@ENUMSHORT@ ({}@enum_name@_get_type())'''.format(decl_decorator, func_prefix, func_prefix)
+
+ h_file_kwargs['ftail'] = '''
+G_END_DECLS'''
+
+ rv = self.mkenums(state, [hdr_filename], h_file_kwargs)
+ h_file = rv.return_value
+
+ return ModuleReturnValue([c_file, h_file], [c_file, h_file])
+
+ @staticmethod
+ def _make_mkenum_custom_target(state, sources, output, cmd, kwargs):
+ custom_kwargs = {
+ 'input': sources,
+ 'output': output,
+ 'capture': True,
+ 'command': cmd
+ }
+ custom_kwargs.update(kwargs)
+ return build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs,
+ # https://github.com/mesonbuild/meson/issues/973
+ absolute_paths=True)
+
+ @permittedKwargs({'sources', 'prefix', 'install_header', 'install_dir', 'stdinc',
+ 'nostdinc', 'internal', 'skip_source', 'valist_marshallers',
+ 'extra_args'})
+ def genmarshal(self, state, args, kwargs):
+ if len(args) != 1:
+ raise MesonException(
+ 'Genmarshal requires one positional argument.')
+ output = args[0]
+
+ if 'sources' not in kwargs:
+ raise MesonException('Missing keyword argument "sources".')
+ sources = kwargs.pop('sources')
+ if isinstance(sources, str):
+ sources = [sources]
+ elif not isinstance(sources, list):
+ raise MesonException(
+ 'Sources keyword argument must be a string or array.')
+
+ new_genmarshal = mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.3')
+
+ cmd = [state.find_program('glib-genmarshal')]
+ known_kwargs = ['internal', 'nostdinc', 'skip_source', 'stdinc',
+ 'valist_marshallers', 'extra_args']
+ known_custom_target_kwargs = ['build_always', 'depends',
+ 'depend_files', 'install_dir',
+ 'install_header']
+ for arg, value in kwargs.items():
+ if arg == 'prefix':
+ cmd += ['--prefix', value]
+ elif arg == 'extra_args':
+ if new_genmarshal:
+ cmd += mesonlib.stringlistify(value)
+ else:
+ mlog.warning('The current version of GLib does not support extra arguments \n'
+ 'for glib-genmarshal. You need at least GLib 2.53.3. See ',
+ mlog.bold('https://github.com/mesonbuild/meson/pull/2049'))
+ elif arg in known_kwargs and value:
+ cmd += ['--' + arg.replace('_', '-')]
+ elif arg not in known_custom_target_kwargs:
+ raise MesonException(
+ 'Genmarshal does not take a {} keyword argument.'.format(
+ arg))
+
+ install_header = kwargs.pop('install_header', False)
+ install_dir = kwargs.pop('install_dir', [])
+
+ custom_kwargs = {
+ 'input': sources,
+ }
+
+ # https://github.com/GNOME/glib/commit/0fbc98097fac4d3e647684f344e508abae109fdf
+ if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.51.0'):
+ cmd += ['--output', '@OUTPUT@']
+ else:
+ custom_kwargs['capture'] = True
+
+ for arg in known_custom_target_kwargs:
+ if arg in kwargs:
+ custom_kwargs[arg] = kwargs[arg]
+
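+        # Two custom targets are produced: '<output>.c' (the body) and
+        # '<output>.h' (the header); only the header honours install_header.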
+ header_file = output + '.h'
+ custom_kwargs['command'] = cmd + ['--body', '@INPUT@']
+ if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.4'):
+ # Silence any warnings about missing prototypes
+ custom_kwargs['command'] += ['--include-header', header_file]
+ custom_kwargs['output'] = output + '.c'
+ body = build.CustomTarget(output + '_c', state.subdir, state.subproject, custom_kwargs)
+
+ custom_kwargs['install'] = install_header
+ custom_kwargs['install_dir'] = install_dir
+ if new_genmarshal:
+ cmd += ['--pragma-once']
+ custom_kwargs['command'] = cmd + ['--header', '@INPUT@']
+ custom_kwargs['output'] = header_file
+ header = build.CustomTarget(output + '_h', state.subdir, state.subproject, custom_kwargs)
+
+ rv = [body, header]
+ return ModuleReturnValue(rv, rv)
+
+ @staticmethod
+ def _vapi_args_to_command(prefix, variable, kwargs, accept_vapi=False):
+ arg_list = mesonlib.extract_as_list(kwargs, variable)
+ ret = []
+ for arg in arg_list:
+ if not isinstance(arg, str):
+                types = 'strings or InternalDependencies' if accept_vapi else 'strings'
+ raise MesonException(f'All {variable} must be {types}')
+ ret.append(prefix + arg)
+ return ret
+
+ def _extract_vapi_packages(self, state, kwargs):
+ '''
+ Packages are special because we need to:
+ - Get a list of packages for the .deps file
+ - Get a list of depends for any VapiTargets
+ - Get package name from VapiTargets
+ - Add include dirs for any VapiTargets
+ '''
+ arg_list = kwargs.get('packages')
+ if not arg_list:
+ return [], [], [], []
+ arg_list = mesonlib.listify(arg_list)
+ vapi_depends = []
+ vapi_packages = []
+ vapi_includes = []
+ ret = []
+ remaining_args = []
+ for arg in arg_list:
+ if isinstance(arg, InternalDependency):
+ targets = [t for t in arg.sources if isinstance(t, VapiTarget)]
+ for target in targets:
+ srcdir = os.path.join(state.environment.get_source_dir(),
+ target.get_subdir())
+ outdir = os.path.join(state.environment.get_build_dir(),
+ target.get_subdir())
+ outfile = target.get_outputs()[0][:-5] # Strip .vapi
+ ret.append('--vapidir=' + outdir)
+ ret.append('--girdir=' + outdir)
+ ret.append('--pkg=' + outfile)
+ vapi_depends.append(target)
+ vapi_packages.append(outfile)
+ vapi_includes.append(srcdir)
+ else:
+ vapi_packages.append(arg)
+ remaining_args.append(arg)
+
+ kwargs['packages'] = remaining_args
+ vapi_args = ret + self._vapi_args_to_command('--pkg=', 'packages', kwargs, accept_vapi=True)
+ return vapi_args, vapi_depends, vapi_packages, vapi_includes
+
+ def _generate_deps(self, state, library, packages, install_dir):
+ outdir = state.environment.scratch_dir
+ fname = os.path.join(outdir, library + '.deps')
+ with open(fname, 'w', encoding='utf-8') as ofile:
+ for package in packages:
+ ofile.write(package + '\n')
+ return build.Data([mesonlib.File(True, outdir, fname)], install_dir, None, state.subproject)
+
+ def _get_vapi_link_with(self, target):
+ link_with = []
+ for dep in target.get_target_dependencies():
+ if isinstance(dep, build.SharedLibrary):
+ link_with.append(dep)
+ elif isinstance(dep, GirTarget):
+ link_with += self._get_vapi_link_with(dep)
+ return link_with
+
+ @permittedKwargs({'sources', 'packages', 'metadata_dirs', 'gir_dirs',
+ 'vapi_dirs', 'install', 'install_dir'})
+ def generate_vapi(self, state, args, kwargs):
+ if len(args) != 1:
+ raise MesonException('The library name is required')
+
+ if not isinstance(args[0], str):
+ raise MesonException('The first argument must be the name of the library')
+ created_values = []
+
+ library = args[0]
+ build_dir = os.path.join(state.environment.get_build_dir(), state.subdir)
+ source_dir = os.path.join(state.environment.get_source_dir(), state.subdir)
+ pkg_cmd, vapi_depends, vapi_packages, vapi_includes = self._extract_vapi_packages(state, kwargs)
+ if 'VAPIGEN' in os.environ:
+ cmd = [state.find_program(os.environ['VAPIGEN'])]
+ else:
+ cmd = [state.find_program('vapigen')]
+ cmd += ['--quiet', '--library=' + library, '--directory=' + build_dir]
+ cmd += self._vapi_args_to_command('--vapidir=', 'vapi_dirs', kwargs)
+ cmd += self._vapi_args_to_command('--metadatadir=', 'metadata_dirs', kwargs)
+ cmd += self._vapi_args_to_command('--girdir=', 'gir_dirs', kwargs)
+ cmd += pkg_cmd
+ cmd += ['--metadatadir=' + source_dir]
+
+ if 'sources' not in kwargs:
+ raise MesonException('sources are required to generate the vapi file')
+
+ inputs = mesonlib.extract_as_list(kwargs, 'sources')
+
+ link_with = []
+ for i in inputs:
+ if isinstance(i, str):
+ cmd.append(os.path.join(source_dir, i))
+ elif isinstance(i, GirTarget):
+ link_with += self._get_vapi_link_with(i)
+ subdir = os.path.join(state.environment.get_build_dir(),
+ i.get_subdir())
+ gir_file = os.path.join(subdir, i.get_outputs()[0])
+ cmd.append(gir_file)
+ else:
+ raise MesonException('Input must be a str or GirTarget')
+
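+        # The custom target below runs vapigen over the collected inputs:
+        # plain source files and the .gir outputs of any GirTargets.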
+ vapi_output = library + '.vapi'
+ custom_kwargs = {
+ 'command': cmd,
+ 'input': inputs,
+ 'output': vapi_output,
+ 'depends': vapi_depends,
+ }
+ install_dir = kwargs.get('install_dir',
+ os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('datadir')),
+ 'vala', 'vapi'))
+ if kwargs.get('install'):
+ custom_kwargs['install'] = kwargs['install']
+ custom_kwargs['install_dir'] = install_dir
+
+ # We shouldn't need this locally but we install it
+ deps_target = self._generate_deps(state, library, vapi_packages, install_dir)
+ created_values.append(deps_target)
+ vapi_target = VapiTarget(vapi_output, state.subdir, state.subproject, custom_kwargs)
+
+ # So to try our best to get this to just work we need:
+    # - link with the correct library
+ # - include the vapi and dependent vapi files in sources
+ # - add relevant directories to include dirs
+ incs = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)]
+ sources = [vapi_target] + vapi_depends
+ rv = InternalDependency(None, incs, [], [], link_with, [], sources, [], {})
+ created_values.append(rv)
+ return ModuleReturnValue(rv, created_values)
+
+def initialize(*args, **kwargs):
+ mod = GnomeModule(*args, **kwargs)
+ mod.interpreter.append_holder_map(GResourceTarget, interpreter.CustomTargetHolder)
+ mod.interpreter.append_holder_map(GResourceHeaderTarget, interpreter.CustomTargetHolder)
+ mod.interpreter.append_holder_map(GirTarget, interpreter.CustomTargetHolder)
+ mod.interpreter.append_holder_map(TypelibTarget, interpreter.CustomTargetHolder)
+ mod.interpreter.append_holder_map(VapiTarget, interpreter.CustomTargetHolder)
+ return mod
diff --git a/meson/mesonbuild/modules/hotdoc.py b/meson/mesonbuild/modules/hotdoc.py
new file mode 100644
index 000000000..4dccd067a
--- /dev/null
+++ b/meson/mesonbuild/modules/hotdoc.py
@@ -0,0 +1,432 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''This module provides helper functions for generating documentation using hotdoc'''
+
+import os
+from collections import OrderedDict
+
+from mesonbuild import mesonlib
+from mesonbuild import mlog, build
+from mesonbuild.coredata import MesonException
+from . import ModuleReturnValue
+from . import ExtensionModule
+from ..dependencies import Dependency, InternalDependency
+from ..interpreterbase import FeatureNew, InvalidArguments, noPosargs, noKwargs
+from ..interpreter import CustomTargetHolder
+from ..programs import ExternalProgram
+
+
+def ensure_list(value):
+ if not isinstance(value, list):
+ return [value]
+ return value
+
+
+MIN_HOTDOC_VERSION = '0.8.100'
+
+
+class HotdocTargetBuilder:
+ def __init__(self, name, state, hotdoc, interpreter, kwargs):
+ self.hotdoc = hotdoc
+ self.build_by_default = kwargs.pop('build_by_default', False)
+ self.kwargs = kwargs
+ self.name = name
+ self.state = state
+ self.interpreter = interpreter
+ self.include_paths = OrderedDict()
+
+ self.builddir = state.environment.get_build_dir()
+ self.sourcedir = state.environment.get_source_dir()
+ self.subdir = state.subdir
+ self.build_command = state.environment.get_build_command()
+
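+        # Start from 'hotdoc conf ...'; the process_*() methods below keep
+        # appending configuration options to this command line.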
+ self.cmd = ['conf', '--project-name', name, "--disable-incremental-build",
+ '--output', os.path.join(self.builddir, self.subdir, self.name + '-doc')]
+
+ self._extra_extension_paths = set()
+ self.extra_assets = set()
+ self._dependencies = []
+ self._subprojects = []
+
+ def process_known_arg(self, option, types, argname=None,
+ value_processor=None, mandatory=False,
+ force_list=False):
+ if not argname:
+ argname = option.strip("-").replace("-", "_")
+
+ value, _ = self.get_value(
+ types, argname, None, value_processor, mandatory, force_list)
+
+ self.set_arg_value(option, value)
+
+ def set_arg_value(self, option, value):
+ if value is None:
+ return
+
+ if isinstance(value, bool):
+ if value:
+ self.cmd.append(option)
+ elif isinstance(value, list):
+ # Do not do anything on empty lists
+ if value:
+ # https://bugs.python.org/issue9334 (from 2010 :( )
+ # The syntax with nargs=+ is inherently ambiguous
+ # A workaround for this case is to simply prefix with a space
+ # every value starting with a dash
+ escaped_value = []
+ for e in value:
+ if isinstance(e, str) and e.startswith('-'):
+ escaped_value += [' %s' % e]
+ else:
+ escaped_value += [e]
+ if option:
+ self.cmd.extend([option] + escaped_value)
+ else:
+ self.cmd.extend(escaped_value)
+ else:
+ # argparse gets confused if value(s) start with a dash.
+ # When an option expects a single value, the unambiguous way
+ # to specify it is with =
+ if isinstance(value, str):
+ self.cmd.extend([f'{option}={value}'])
+ else:
+ self.cmd.extend([option, value])
+
+ def check_extra_arg_type(self, arg, value):
+ if isinstance(value, list):
+ for v in value:
+ self.check_extra_arg_type(arg, v)
+ return
+
+ valid_types = (str, bool, mesonlib.File, build.IncludeDirs, build.CustomTarget, build.BuildTarget)
+ if not isinstance(value, valid_types):
+ raise InvalidArguments('Argument "{}={}" should be of type: {}.'.format(
+ arg, value, [t.__name__ for t in valid_types]))
+
+ def process_extra_args(self):
+ for arg, value in self.kwargs.items():
+ option = "--" + arg.replace("_", "-")
+ self.check_extra_arg_type(arg, value)
+ self.set_arg_value(option, value)
+
+ def get_value(self, types, argname, default=None, value_processor=None,
+ mandatory=False, force_list=False):
+ if not isinstance(types, list):
+ types = [types]
+ try:
+ uvalue = value = self.kwargs.pop(argname)
+ if value_processor:
+ value = value_processor(value)
+
+ for t in types:
+ if isinstance(value, t):
+ if force_list and not isinstance(value, list):
+ return [value], uvalue
+ return value, uvalue
+ raise MesonException("%s field value %s is not valid,"
+ " valid types are %s" % (argname, value,
+ types))
+ except KeyError:
+ if mandatory:
+ raise MesonException("%s mandatory field not found" % argname)
+
+ if default is not None:
+ return default, default
+
+ return None, None
+
+ def setup_extension_paths(self, paths):
+ if not isinstance(paths, list):
+ paths = [paths]
+
+ for path in paths:
+ self.add_extension_paths([path])
+
+ return []
+
+ def add_extension_paths(self, paths):
+ for path in paths:
+ if path in self._extra_extension_paths:
+ continue
+
+ self._extra_extension_paths.add(path)
+ self.cmd.extend(["--extra-extension-path", path])
+
+ def process_extra_extension_paths(self):
+ self.get_value([list, str], 'extra_extensions_paths',
+ default="", value_processor=self.setup_extension_paths)
+
+ def replace_dirs_in_string(self, string):
+ return string.replace("@SOURCE_ROOT@", self.sourcedir).replace("@BUILD_ROOT@", self.builddir)
+
+ def process_gi_c_source_roots(self):
+ if self.hotdoc.run_hotdoc(['--has-extension=gi-extension']) != 0:
+ return
+
+ value, _ = self.get_value([list, str], 'gi_c_source_roots', default=[], force_list=True)
+ value.extend([
+ os.path.join(self.state.environment.get_source_dir(),
+ self.interpreter.subproject_dir, self.state.subproject),
+ os.path.join(self.state.environment.get_build_dir(), self.interpreter.subproject_dir, self.state.subproject)
+ ])
+
+ self.cmd += ['--gi-c-source-roots'] + value
+
+ def process_dependencies(self, deps):
+ cflags = set()
+ for dep in mesonlib.listify(ensure_list(deps)):
+ if isinstance(dep, InternalDependency):
+ inc_args = self.state.get_include_args(dep.include_directories)
+ cflags.update([self.replace_dirs_in_string(x)
+ for x in inc_args])
+ cflags.update(self.process_dependencies(dep.libraries))
+ cflags.update(self.process_dependencies(dep.sources))
+ cflags.update(self.process_dependencies(dep.ext_deps))
+ elif isinstance(dep, Dependency):
+ cflags.update(dep.get_compile_args())
+ elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
+ self._dependencies.append(dep)
+ for incd in dep.get_include_dirs():
+ cflags.update(incd.get_incdirs())
+ elif isinstance(dep, HotdocTarget):
+ # Recurse in hotdoc target dependencies
+ self.process_dependencies(dep.get_target_dependencies())
+ self._subprojects.extend(dep.subprojects)
+ self.process_dependencies(dep.subprojects)
+ self.add_include_path(os.path.join(self.builddir, dep.hotdoc_conf.subdir))
+ self.cmd += ['--extra-assets=' + p for p in dep.extra_assets]
+ self.add_extension_paths(dep.extra_extension_paths)
+            elif isinstance(dep, (build.CustomTarget, build.BuildTarget)):
+ self._dependencies.append(dep)
+
+ return [f.strip('-I') for f in cflags]
+
+ def process_extra_assets(self):
+        self._extra_assets, _ = self.get_value([str, list], 'extra_assets',
+                                               default=[], force_list=True)
+ for assets_path in self._extra_assets:
+ self.cmd.extend(["--extra-assets", assets_path])
+
+ def process_subprojects(self):
+ _, value = self.get_value([
+ list, HotdocTarget], argname="subprojects",
+ force_list=True, value_processor=self.process_dependencies)
+
+ if value is not None:
+ self._subprojects.extend(value)
+
+ def flatten_config_command(self):
+ cmd = []
+ for arg in mesonlib.listify(self.cmd, flatten=True):
+ if isinstance(arg, mesonlib.File):
+ arg = arg.absolute_path(self.state.environment.get_source_dir(),
+ self.state.environment.get_build_dir())
+ elif isinstance(arg, build.IncludeDirs):
+ for inc_dir in arg.get_incdirs():
+ cmd.append(os.path.join(self.sourcedir, arg.get_curdir(), inc_dir))
+ cmd.append(os.path.join(self.builddir, arg.get_curdir(), inc_dir))
+
+ continue
+            elif isinstance(arg, (build.CustomTarget, build.BuildTarget)):
+ self._dependencies.append(arg)
+ arg = self.interpreter.backend.get_target_filename_abs(arg)
+
+ cmd.append(arg)
+
+ return cmd
+
+ def generate_hotdoc_config(self):
+ cwd = os.path.abspath(os.curdir)
+ ncwd = os.path.join(self.sourcedir, self.subdir)
+ mlog.log('Generating Hotdoc configuration for: ', mlog.bold(self.name))
+ os.chdir(ncwd)
+ self.hotdoc.run_hotdoc(self.flatten_config_command())
+ os.chdir(cwd)
+
+ def ensure_file(self, value):
+ if isinstance(value, list):
+ res = []
+ for val in value:
+ res.append(self.ensure_file(val))
+ return res
+
+ if not isinstance(value, mesonlib.File):
+ return mesonlib.File.from_source_file(self.sourcedir, self.subdir, value)
+
+ return value
+
+ def ensure_dir(self, value):
+ if os.path.isabs(value):
+ _dir = value
+ else:
+ _dir = os.path.join(self.sourcedir, self.subdir, value)
+
+ if not os.path.isdir(_dir):
+ raise InvalidArguments('"%s" is not a directory.' % _dir)
+
+ return os.path.relpath(_dir, os.path.join(self.builddir, self.subdir))
+
+ def check_forbidden_args(self):
+ for arg in ['conf_file']:
+ if arg in self.kwargs:
+ raise InvalidArguments('Argument "%s" is forbidden.' % arg)
+
+ def add_include_path(self, path):
+ self.include_paths[path] = path
+
+ def make_targets(self):
+ self.check_forbidden_args()
+ file_types = (str, mesonlib.File)
+ self.process_known_arg("--index", file_types, mandatory=True, value_processor=self.ensure_file)
+ self.process_known_arg("--project-version", str, mandatory=True)
+ self.process_known_arg("--sitemap", file_types, mandatory=True, value_processor=self.ensure_file)
+ self.process_known_arg("--html-extra-theme", str, value_processor=self.ensure_dir)
+ self.process_known_arg(None, list, "include_paths", force_list=True,
+ value_processor=lambda x: [self.add_include_path(self.ensure_dir(v)) for v in ensure_list(x)])
+ self.process_known_arg('--c-include-directories',
+ [Dependency, build.StaticLibrary, build.SharedLibrary, list], argname="dependencies",
+ force_list=True, value_processor=self.process_dependencies)
+ self.process_gi_c_source_roots()
+ self.process_extra_assets()
+ self.process_extra_extension_paths()
+ self.process_subprojects()
+
+        install, _ = self.get_value(bool, "install", mandatory=False)
+ self.process_extra_args()
+
+ fullname = self.name + '-doc'
+ hotdoc_config_name = fullname + '.json'
+ hotdoc_config_path = os.path.join(
+ self.builddir, self.subdir, hotdoc_config_name)
+ with open(hotdoc_config_path, 'w', encoding='utf-8') as f:
+ f.write('{}')
+
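+        # The config file starts out empty; generate_hotdoc_config() runs
+        # 'hotdoc conf' to fill it in.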
+ self.cmd += ['--conf-file', hotdoc_config_path]
+ self.add_include_path(os.path.join(self.builddir, self.subdir))
+ self.add_include_path(os.path.join(self.sourcedir, self.subdir))
+
+ depfile = os.path.join(self.builddir, self.subdir, self.name + '.deps')
+ self.cmd += ['--deps-file-dest', depfile]
+
+ for path in self.include_paths.keys():
+ self.cmd.extend(['--include-path', path])
+
+ if self.state.environment.coredata.get_option(mesonlib.OptionKey('werror', subproject=self.state.subproject)):
+ self.cmd.append('--fatal-warning')
+ self.generate_hotdoc_config()
+
+ target_cmd = self.build_command + ["--internal", "hotdoc"] + \
+ self.hotdoc.get_command() + ['run', '--conf-file', hotdoc_config_name] + \
+ ['--builddir', os.path.join(self.builddir, self.subdir)]
+
+ target = HotdocTarget(fullname,
+ subdir=self.subdir,
+ subproject=self.state.subproject,
+ hotdoc_conf=mesonlib.File.from_built_file(
+ self.subdir, hotdoc_config_name),
+ extra_extension_paths=self._extra_extension_paths,
+ extra_assets=self._extra_assets,
+ subprojects=self._subprojects,
+ command=target_cmd,
+ depends=self._dependencies,
+ output=fullname,
+ depfile=os.path.basename(depfile),
+ build_by_default=self.build_by_default)
+
+ install_script = None
+ if install is True:
+ install_script = self.state.backend.get_executable_serialisation(self.build_command + [
+ "--internal", "hotdoc",
+ "--install", os.path.join(fullname, 'html'),
+ '--name', self.name,
+ '--builddir', os.path.join(self.builddir, self.subdir)] +
+ self.hotdoc.get_command() +
+ ['run', '--conf-file', hotdoc_config_name])
+
+ return (target, install_script)
+
+
+class HotdocTargetHolder(CustomTargetHolder):
+ def __init__(self, target, interp):
+ super().__init__(target, interp)
+ self.methods.update({'config_path': self.config_path_method})
+
+ @noPosargs
+ @noKwargs
+ def config_path_method(self, *args, **kwargs):
+ conf = self.held_object.hotdoc_conf.absolute_path(self.interpreter.environment.source_dir,
+ self.interpreter.environment.build_dir)
+ return conf
+
+
+class HotdocTarget(build.CustomTarget):
+ def __init__(self, name, subdir, subproject, hotdoc_conf, extra_extension_paths, extra_assets,
+ subprojects, **kwargs):
+ super().__init__(name, subdir, subproject, kwargs, absolute_paths=True)
+ self.hotdoc_conf = hotdoc_conf
+ self.extra_extension_paths = extra_extension_paths
+ self.extra_assets = extra_assets
+ self.subprojects = subprojects
+
+ def __getstate__(self):
+ # Make sure we do not try to pickle subprojects
+ res = self.__dict__.copy()
+ res['subprojects'] = []
+
+ return res
+
+
+class HotDocModule(ExtensionModule):
+ @FeatureNew('Hotdoc Module', '0.48.0')
+ def __init__(self, interpreter):
+ super().__init__(interpreter)
+ self.hotdoc = ExternalProgram('hotdoc')
+ if not self.hotdoc.found():
+ raise MesonException('hotdoc executable not found')
+
+ try:
+ from hotdoc.run_hotdoc import run # noqa: F401
+ self.hotdoc.run_hotdoc = run
+ except Exception as e:
+ raise MesonException('hotdoc {} required but not found. ({})'.format(
+ MIN_HOTDOC_VERSION, e))
+ self.methods.update({
+ 'has_extensions': self.has_extensions,
+ 'generate_doc': self.generate_doc,
+ })
+
+ @noKwargs
+ def has_extensions(self, state, args, kwargs):
+ return self.hotdoc.run_hotdoc(['--has-extension=%s' % extension for extension in args]) == 0
+
+ def generate_doc(self, state, args, kwargs):
+ if len(args) != 1:
+ raise MesonException('One positional argument is'
+ ' required for the project name.')
+
+ project_name = args[0]
+ builder = HotdocTargetBuilder(project_name, state, self.hotdoc, self.interpreter, kwargs)
+ target, install_script = builder.make_targets()
+ targets = [target]
+ if install_script:
+ targets.append(install_script)
+
+ return ModuleReturnValue(targets[0], targets)
+
+
+def initialize(interpreter):
+ mod = HotDocModule(interpreter)
+ mod.interpreter.append_holder_map(HotdocTarget, HotdocTargetHolder)
+ return mod
diff --git a/meson/mesonbuild/modules/i18n.py b/meson/mesonbuild/modules/i18n.py
new file mode 100644
index 000000000..a64838b57
--- /dev/null
+++ b/meson/mesonbuild/modules/i18n.py
@@ -0,0 +1,197 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import shutil
+
+from os import path
+from .. import coredata, mesonlib, build, mlog
+from ..mesonlib import MesonException
+from . import ModuleReturnValue
+from . import ExtensionModule
+from ..interpreterbase import permittedKwargs, FeatureNew, FeatureNewKwargs
+
+PRESET_ARGS = {
+ 'glib': [
+ '--from-code=UTF-8',
+ '--add-comments',
+
+ # https://developer.gnome.org/glib/stable/glib-I18N.html
+ '--keyword=_',
+ '--keyword=N_',
+ '--keyword=C_:1c,2',
+ '--keyword=NC_:1c,2',
+ '--keyword=g_dcgettext:2',
+ '--keyword=g_dngettext:2,3',
+ '--keyword=g_dpgettext2:2c,3',
+
+ '--flag=N_:1:pass-c-format',
+ '--flag=C_:2:pass-c-format',
+ '--flag=NC_:2:pass-c-format',
+ '--flag=g_dngettext:2:pass-c-format',
+ '--flag=g_strdup_printf:1:c-format',
+ '--flag=g_string_printf:2:c-format',
+ '--flag=g_string_append_printf:2:c-format',
+ '--flag=g_error_new:3:c-format',
+ '--flag=g_set_error:4:c-format',
+ '--flag=g_markup_printf_escaped:1:c-format',
+ '--flag=g_log:3:c-format',
+ '--flag=g_print:1:c-format',
+ '--flag=g_printerr:1:c-format',
+ '--flag=g_printf:1:c-format',
+ '--flag=g_fprintf:2:c-format',
+ '--flag=g_sprintf:2:c-format',
+ '--flag=g_snprintf:3:c-format',
+ ]
+}
+
+
+class I18nModule(ExtensionModule):
+ def __init__(self, interpreter):
+ super().__init__(interpreter)
+ self.methods.update({
+ 'merge_file': self.merge_file,
+ 'gettext': self.gettext,
+ })
+
+ @staticmethod
+ def nogettext_warning():
+ mlog.warning('Gettext not found, all translation targets will be ignored.', once=True)
+
+ @staticmethod
+ def _get_data_dirs(state, dirs):
+ """Returns source directories of relative paths"""
+ src_dir = path.join(state.environment.get_source_dir(), state.subdir)
+ return [path.join(src_dir, d) for d in dirs]
+
+ @FeatureNew('i18n.merge_file', '0.37.0')
+ @FeatureNewKwargs('i18n.merge_file', '0.51.0', ['args'])
+ @permittedKwargs(build.CustomTarget.known_kwargs | {'data_dirs', 'po_dir', 'type', 'args'})
+ def merge_file(self, state, args, kwargs):
+ if not shutil.which('xgettext'):
+ self.nogettext_warning()
+            return ModuleReturnValue(None, [])
+ podir = kwargs.pop('po_dir', None)
+ if not podir:
+ raise MesonException('i18n: po_dir is a required kwarg')
+ podir = path.join(state.build_to_src, state.subdir, podir)
+
+ file_type = kwargs.pop('type', 'xml')
+ VALID_TYPES = ('xml', 'desktop')
+ if file_type not in VALID_TYPES:
+            raise MesonException(f'i18n: "{file_type}" is not a valid type; must be one of {VALID_TYPES}')
+
+ datadirs = self._get_data_dirs(state, mesonlib.stringlistify(kwargs.pop('data_dirs', [])))
+ datadirs = '--datadirs=' + ':'.join(datadirs) if datadirs else None
+
+ command = state.environment.get_build_command() + [
+ '--internal', 'msgfmthelper',
+ '@INPUT@', '@OUTPUT@', file_type, podir
+ ]
+ if datadirs:
+ command.append(datadirs)
+
+ if 'args' in kwargs:
+ command.append('--')
+            command.extend(mesonlib.stringlistify(kwargs.pop('args', [])))
+
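+        # The merge itself is done by Meson's internal 'msgfmthelper', which
+        # wraps msgfmt in --xml/--desktop mode.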
+ kwargs['command'] = command
+
+ # We only use this input file to create the name of the custom target;
+ # the other entries can be ignored.
+ inputfile = mesonlib.extract_as_list(kwargs, 'input')[0]
+ if isinstance(inputfile, str):
+ inputfile = mesonlib.File.from_source_file(state.environment.source_dir,
+ state.subdir, inputfile)
+ if isinstance(inputfile, mesonlib.File):
+ # output could be '@BASENAME@' in which case we need to do substitutions
+ # to get a unique target name.
+ output = kwargs['output']
+ ifile_abs = inputfile.absolute_path(state.environment.source_dir,
+ state.environment.build_dir)
+ values = mesonlib.get_filenames_templates_dict([ifile_abs], None)
+ outputs = mesonlib.substitute_values([output], values)
+ output = outputs[0]
+ ct = build.CustomTarget(output + '_' + state.subdir.replace('/', '@').replace('\\', '@') + '_merge', state.subdir, state.subproject, kwargs)
+ else:
+ ct = build.CustomTarget(kwargs['output'] + '_merge', state.subdir, state.subproject, kwargs)
+
+ return ModuleReturnValue(ct, [ct])
+
+ @FeatureNewKwargs('i18n.gettext', '0.37.0', ['preset'])
+ @FeatureNewKwargs('i18n.gettext', '0.50.0', ['install_dir'])
+ @permittedKwargs({'po_dir', 'data_dirs', 'type', 'languages', 'args', 'preset', 'install', 'install_dir'})
+ def gettext(self, state, args, kwargs):
+ if len(args) != 1:
+ raise coredata.MesonException('Gettext requires one positional argument (package name).')
+ if not shutil.which('xgettext'):
+ self.nogettext_warning()
+ return
+ packagename = args[0]
+ languages = mesonlib.stringlistify(kwargs.get('languages', []))
+ datadirs = self._get_data_dirs(state, mesonlib.stringlistify(kwargs.get('data_dirs', [])))
+ extra_args = mesonlib.stringlistify(kwargs.get('args', []))
+
+ preset = kwargs.pop('preset', None)
+ if preset:
+ preset_args = PRESET_ARGS.get(preset)
+ if not preset_args:
+ raise coredata.MesonException('i18n: Preset "{}" is not one of the valid options: {}'.format(
+ preset, list(PRESET_ARGS.keys())))
+ # Deduplicate while preserving order; a plain set() would make the
+ # resulting command line non-deterministic between runs.
+ extra_args = list(dict.fromkeys(preset_args + extra_args))
+
+ pkg_arg = '--pkgname=' + packagename
+ lang_arg = '--langs=' + '@@'.join(languages) if languages else None
+ datadirs = '--datadirs=' + ':'.join(datadirs) if datadirs else None
+ extra_args = '--extra-args=' + '@@'.join(extra_args) if extra_args else None
+
+ potargs = state.environment.get_build_command() + ['--internal', 'gettext', 'pot', pkg_arg]
+ if datadirs:
+ potargs.append(datadirs)
+ if extra_args:
+ potargs.append(extra_args)
+ pottarget = build.RunTarget(packagename + '-pot', potargs, [], state.subdir, state.subproject)
+
+ gmoargs = state.environment.get_build_command() + ['--internal', 'gettext', 'gen_gmo']
+ if lang_arg:
+ gmoargs.append(lang_arg)
+ gmotarget = build.RunTarget(packagename + '-gmo', gmoargs, [], state.subdir, state.subproject)
+
+ updatepoargs = state.environment.get_build_command() + ['--internal', 'gettext', 'update_po', pkg_arg]
+ if lang_arg:
+ updatepoargs.append(lang_arg)
+ if datadirs:
+ updatepoargs.append(datadirs)
+ if extra_args:
+ updatepoargs.append(extra_args)
+ updatepotarget = build.RunTarget(packagename + '-update-po', updatepoargs, [], state.subdir, state.subproject)
+
+ targets = [pottarget, gmotarget, updatepotarget]
+
+ install = kwargs.get('install', True)
+ if install:
+ install_dir = kwargs.get('install_dir', state.environment.coredata.get_option(mesonlib.OptionKey('localedir')))
+ script = state.environment.get_build_command()
+ args = ['--internal', 'gettext', 'install',
+ '--subdir=' + state.subdir,
+ '--localedir=' + install_dir,
+ pkg_arg]
+ if lang_arg:
+ args.append(lang_arg)
+ iscript = state.backend.get_executable_serialisation(script + args)
+ targets.append(iscript)
+
+ return ModuleReturnValue(None, targets)
+
+def initialize(*args, **kwargs):
+ return I18nModule(*args, **kwargs)
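+
+# Example usage from a meson.build file (a sketch; the file names are
+# hypothetical and the 'po' directory follows the usual gettext layout):
+#   i18n = import('i18n')
+#   i18n.gettext('mypkg', languages: ['de', 'fr'], preset: 'glib')
+#   i18n.merge_file(input: 'mypkg.desktop.in', output: 'mypkg.desktop',
+#                   type: 'desktop', po_dir: 'po')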
diff --git a/meson/mesonbuild/modules/keyval.py b/meson/mesonbuild/modules/keyval.py
new file mode 100644
index 000000000..b2d54db01
--- /dev/null
+++ b/meson/mesonbuild/modules/keyval.py
@@ -0,0 +1,72 @@
+# Copyright 2017, 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import ExtensionModule
+
+from .. import mesonlib
+from ..mesonlib import typeslistify
+from ..interpreterbase import FeatureNew, noKwargs, InvalidCode
+
+import os
+
+class KeyvalModule(ExtensionModule):
+
+ @FeatureNew('Keyval Module', '0.55.0')
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.methods.update({
+ 'load': self.load,
+ })
+
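+ # A sketch of the format parsed below: one 'KEY=value' pair per line,
+ # '#' starts a comment, and lines without '=' are skipped. For example:
+ #   CONFIG_FOO=y   # trailing comments are stripped
+ # yields {'CONFIG_FOO': 'y'}.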
+ def _load_file(self, path_to_config):
+ result = dict()
+ try:
+ with open(path_to_config, encoding='utf-8') as f:
+ for line in f:
+ if '#' in line:
+ comment_idx = line.index('#')
+ line = line[:comment_idx]
+ line = line.strip()
+ try:
+ name, val = line.split('=', 1)
+ except ValueError:
+ continue
+ result[name.strip()] = val.strip()
+ except OSError as e:
+ raise mesonlib.MesonException(f'Failed to load {path_to_config}: {e}')
+
+ return result
+
+ @noKwargs
+ def load(self, state, args, kwargs):
+ sources = typeslistify(args, (str, mesonlib.File))
+ if len(sources) != 1:
+ raise InvalidCode('load takes only one file input.')
+
+ s = sources[0]
+ is_built = False
+ if isinstance(s, mesonlib.File):
+ is_built = is_built or s.is_built
+ s = s.absolute_path(self.interpreter.environment.source_dir, self.interpreter.environment.build_dir)
+ else:
+ s = os.path.join(self.interpreter.environment.source_dir, s)
+
+ if s not in self.interpreter.build_def_files and not is_built:
+ self.interpreter.build_def_files.append(s)
+
+ return self._load_file(s)
+
+
+def initialize(*args, **kwargs):
+ return KeyvalModule(*args, **kwargs)
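+
+# Example usage from a meson.build file (a sketch; '.config' is a
+# hypothetical kconfig-style fragment):
+#   keyval = import('keyval')
+#   conf = keyval.load('.config')
+#   message(conf.get('CONFIG_FOO', 'n'))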
diff --git a/meson/mesonbuild/modules/modtest.py b/meson/mesonbuild/modules/modtest.py
new file mode 100644
index 000000000..dd2e2ff8d
--- /dev/null
+++ b/meson/mesonbuild/modules/modtest.py
@@ -0,0 +1,30 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import ExtensionModule
+from ..interpreterbase import noKwargs
+
+class TestModule(ExtensionModule):
+ def __init__(self, interpreter):
+ super().__init__(interpreter)
+ self.methods.update({
+ 'print_hello': self.print_hello,
+ })
+
+ @noKwargs
+ def print_hello(self, state, args, kwargs):
+ print('Hello from a Meson module')
+
+def initialize(*args, **kwargs):
+ return TestModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/pkgconfig.py b/meson/mesonbuild/modules/pkgconfig.py
new file mode 100644
index 000000000..c6eaedca5
--- /dev/null
+++ b/meson/mesonbuild/modules/pkgconfig.py
@@ -0,0 +1,591 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from pathlib import PurePath
+
+from .. import build
+from .. import dependencies
+from ..dependencies import ThreadDependency
+from .. import mesonlib
+from .. import mlog
+from . import ModuleReturnValue
+from . import ExtensionModule
+from ..interpreterbase import permittedKwargs, FeatureNew, FeatureNewKwargs
+
+already_warned_objs = set()
+
+class DependenciesHelper:
+ def __init__(self, state, name):
+ self.state = state
+ self.name = name
+ self.pub_libs = []
+ self.pub_reqs = []
+ self.priv_libs = []
+ self.priv_reqs = []
+ self.cflags = []
+ self.version_reqs = {}
+ self.link_whole_targets = []
+
+ def add_pub_libs(self, libs):
+ libs, reqs, cflags = self._process_libs(libs, True)
+ self.pub_libs = libs + self.pub_libs # prepend to preserve dependencies
+ self.pub_reqs += reqs
+ self.cflags += cflags
+
+ def add_priv_libs(self, libs):
+ libs, reqs, _ = self._process_libs(libs, False)
+ self.priv_libs = libs + self.priv_libs
+ self.priv_reqs += reqs
+
+ def add_pub_reqs(self, reqs):
+ self.pub_reqs += self._process_reqs(reqs)
+
+ def add_priv_reqs(self, reqs):
+ self.priv_reqs += self._process_reqs(reqs)
+
+ def _check_generated_pc_deprecation(self, obj):
+ if not hasattr(obj, 'generated_pc_warn'):
+ return
+ name = obj.generated_pc_warn[0]
+ if (name, obj.name) in already_warned_objs:
+ return
+ mlog.deprecation('Library', mlog.bold(obj.name), 'was passed to the '
+ '"libraries" keyword argument of a previous call '
+ 'to the generate() method instead of as the first '
+ 'positional argument.', 'Adding', mlog.bold(obj.generated_pc),
+ 'to the "Requires" field, but this is deprecated '
+ 'behaviour that will change in a future version '
+ 'of Meson. Please report the issue if this '
+ 'warning cannot be avoided in your case.',
+ location=obj.generated_pc_warn[1])
+ already_warned_objs.add((name, obj.name))
+
+ def _process_reqs(self, reqs):
+ '''Returns string names of requirements'''
+ processed_reqs = []
+ for obj in mesonlib.listify(reqs):
+ if not isinstance(obj, str):
+ FeatureNew.single_use('pkgconfig.generate requirement from non-string object', '0.46.0', self.state.subproject)
+ if hasattr(obj, 'generated_pc'):
+ self._check_generated_pc_deprecation(obj)
+ processed_reqs.append(obj.generated_pc)
+ elif hasattr(obj, 'pcdep'):
+ pcdeps = mesonlib.listify(obj.pcdep)
+ for d in pcdeps:
+ processed_reqs.append(d.name)
+ self.add_version_reqs(d.name, obj.version_reqs)
+ elif isinstance(obj, dependencies.PkgConfigDependency):
+ if obj.found():
+ processed_reqs.append(obj.name)
+ self.add_version_reqs(obj.name, obj.version_reqs)
+ elif isinstance(obj, str):
+ name, version_req = self.split_version_req(obj)
+ processed_reqs.append(name)
+ self.add_version_reqs(name, version_req)
+ elif isinstance(obj, dependencies.Dependency) and not obj.found():
+ pass
+ elif isinstance(obj, ThreadDependency):
+ pass
+ else:
+ raise mesonlib.MesonException('requires argument not a string, '
+ 'library with pkgconfig-generated file '
+ 'or pkgconfig-dependency object, '
+ 'got {!r}'.format(obj))
+ return processed_reqs
+
+ def add_cflags(self, cflags):
+ self.cflags += mesonlib.stringlistify(cflags)
+
+ def _process_libs(self, libs, public: bool):
+ libs = mesonlib.listify(libs)
+ processed_libs = []
+ processed_reqs = []
+ processed_cflags = []
+ for obj in libs:
+ if hasattr(obj, 'pcdep'):
+ pcdeps = mesonlib.listify(obj.pcdep)
+ for d in pcdeps:
+ processed_reqs.append(d.name)
+ self.add_version_reqs(d.name, obj.version_reqs)
+ elif hasattr(obj, 'generated_pc'):
+ self._check_generated_pc_deprecation(obj)
+ processed_reqs.append(obj.generated_pc)
+ elif isinstance(obj, dependencies.PkgConfigDependency):
+ if obj.found():
+ processed_reqs.append(obj.name)
+ self.add_version_reqs(obj.name, obj.version_reqs)
+ elif isinstance(obj, dependencies.InternalDependency):
+ if obj.found():
+ processed_libs += obj.get_link_args()
+ processed_cflags += obj.get_compile_args()
+ self._add_lib_dependencies(obj.libraries, obj.whole_libraries, obj.ext_deps, public, private_external_deps=True)
+ elif isinstance(obj, dependencies.Dependency):
+ if obj.found():
+ processed_libs += obj.get_link_args()
+ processed_cflags += obj.get_compile_args()
+ elif isinstance(obj, build.SharedLibrary) and obj.shared_library_only:
+ # Do not pull dependencies for shared libraries because they are
+ # only required for static linking. Adding private requires has
+ # the side effect of exposing their cflags, which is the
+ # intended behaviour of pkg-config, but forces Debian to add more
+ # build dependencies than needed.
+ # See https://bugs.freedesktop.org/show_bug.cgi?id=105572
+ processed_libs.append(obj)
+ elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)):
+ processed_libs.append(obj)
+ # If there is a static library in `Libs:` all its deps must be
+ # public too, otherwise the generated pc file will never be
+ # usable without --static.
+ self._add_lib_dependencies(obj.link_targets,
+ obj.link_whole_targets,
+ obj.external_deps,
+ isinstance(obj, build.StaticLibrary) and public)
+ elif isinstance(obj, (build.CustomTarget, build.CustomTargetIndex)):
+ if not obj.is_linkable_target():
+ raise mesonlib.MesonException('library argument contains a non-linkable custom_target.')
+ FeatureNew.single_use('custom_target in pkgconfig.generate libraries', '0.58.0', self.state.subproject)
+ processed_libs.append(obj)
+ elif isinstance(obj, str):
+ processed_libs.append(obj)
+ else:
+ raise mesonlib.MesonException(f'library argument of type {type(obj).__name__} not a string, library or dependency object.')
+
+ return processed_libs, processed_reqs, processed_cflags
+
+ def _add_lib_dependencies(self, link_targets, link_whole_targets, external_deps, public, private_external_deps=False):
+ add_libs = self.add_pub_libs if public else self.add_priv_libs
+ # Recursively add all linked libraries
+ for t in link_targets:
+ # Internal libraries (uninstalled static library) will be promoted
+ # to link_whole, treat them as such here.
+ if t.is_internal():
+ self._add_link_whole(t, public)
+ else:
+ add_libs([t])
+ for t in link_whole_targets:
+ self._add_link_whole(t, public)
+ # And finally its external dependencies
+ if private_external_deps:
+ self.add_priv_libs(external_deps)
+ else:
+ add_libs(external_deps)
+
+ def _add_link_whole(self, t, public):
+ # Don't include static libraries that we link_whole. But we still need to
+ # include their dependencies: a static library we link_whole
+ # could itself link to a shared library or an installed static library.
+ # Keep track of link_whole_targets so we can remove them from our
+ # lists in case a library is link_with and link_whole at the same time.
+ # See remove_dups() below.
+ self.link_whole_targets.append(t)
+ self._add_lib_dependencies(t.link_targets, t.link_whole_targets, t.external_deps, public)
+
+ def add_version_reqs(self, name, version_reqs):
+ if version_reqs:
+ if name not in self.version_reqs:
+ self.version_reqs[name] = set()
+ # Note that pkg-config is picky about whitespace.
+ # 'foo > 1.2' is ok but 'foo>1.2' is not.
+ # 'foo, bar' is ok, but 'foo,bar' is not.
+ new_vreqs = mesonlib.stringlistify(version_reqs)
+ self.version_reqs[name].update(new_vreqs)
+
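+ # A sketch of the behaviour: split_version_req('glib-2.0 >= 2.50')
+ # returns ('glib-2.0', '>= 2.50'), while a bare name like 'zlib'
+ # returns ('zlib', None).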
+ def split_version_req(self, s):
+ for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
+ pos = s.find(op)
+ if pos > 0:
+ return s[0:pos].strip(), s[pos:].strip()
+ return s, None
+
+ def format_vreq(self, vreq):
+ # vreq are '>=1.0' and pkgconfig wants '>= 1.0'
+ for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
+ if vreq.startswith(op):
+ return op + ' ' + vreq[len(op):]
+ return vreq
+
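+ # Sketch: with self.version_reqs == {'glib-2.0': {'>=2.50'}},
+ # format_reqs(['glib-2.0', 'gio-2.0']) yields 'glib-2.0 >= 2.50, gio-2.0'.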
+ def format_reqs(self, reqs):
+ result = []
+ for name in reqs:
+ vreqs = self.version_reqs.get(name, None)
+ if vreqs:
+ result += [name + ' ' + self.format_vreq(vreq) for vreq in vreqs]
+ else:
+ result += [name]
+ return ', '.join(result)
+
+ def remove_dups(self):
+ # Set of ids that have already been handled and should not be added any more
+ exclude = set()
+
+ # We can't just check if 'x' is excluded because we could have copies of
+ # the same SharedLibrary object for example.
+ def _ids(x):
+ if hasattr(x, 'generated_pc'):
+ yield x.generated_pc
+ if isinstance(x, build.Target):
+ yield x.get_id()
+ yield x
+
+ # Exclude 'x' in all its forms and return if it was already excluded
+ def _add_exclude(x):
+ was_excluded = False
+ for i in _ids(x):
+ if i in exclude:
+ was_excluded = True
+ else:
+ exclude.add(i)
+ return was_excluded
+
+ # link_whole targets are already part of other targets, exclude them all.
+ for t in self.link_whole_targets:
+ _add_exclude(t)
+
+ def _fn(xs, libs=False):
+ # Remove duplicates whilst preserving original order
+ result = []
+ for x in xs:
+ # Don't de-dup unknown strings to avoid messing up arguments like:
+ # ['-framework', 'CoreAudio', '-framework', 'CoreMedia']
+ known_flags = ['-pthread']
+ cannot_dedup = libs and isinstance(x, str) and \
+ not x.startswith(('-l', '-L')) and \
+ x not in known_flags
+ if not cannot_dedup and _add_exclude(x):
+ continue
+ result.append(x)
+ return result
+
+ # Handle lists in priority order: public items can be excluded from
+ # private ones, and Requires can be excluded from Libs.
+ self.pub_reqs = _fn(self.pub_reqs)
+ self.pub_libs = _fn(self.pub_libs, True)
+ self.priv_reqs = _fn(self.priv_reqs)
+ self.priv_libs = _fn(self.priv_libs, True)
+ # Reset exclude list just in case some values can be both cflags and libs.
+ exclude = set()
+ self.cflags = _fn(self.cflags)
+
+class PkgConfigModule(ExtensionModule):
+ def __init__(self, interpreter):
+ super().__init__(interpreter)
+ self.methods.update({
+ 'generate': self.generate,
+ })
+
+ def _get_lname(self, l, msg, pcfile, is_custom_target):
+ if is_custom_target:
+ basename = os.path.basename(l.get_filename())
+ name = os.path.splitext(basename)[0]
+ if name.startswith('lib'):
+ name = name[3:]
+ return name
+ # Nothing special
+ if not l.name_prefix_set:
+ return l.name
+ # Sometimes people want the library to start with 'lib' everywhere,
+ # which is achieved by setting name_prefix to '' and the target name to
+ # 'libfoo'. In that case, try to get the pkg-config '-lfoo' arg correct.
+ if l.prefix == '' and l.name.startswith('lib'):
+ return l.name[3:]
+ # If the library is imported via an import library which is always
+ # named after the target name, '-lfoo' is correct.
+ if isinstance(l, build.SharedLibrary) and l.import_filename:
+ return l.name
+ # In other cases, we can't guarantee that the compiler will be able to
+ # find the library via '-lfoo', so tell the user that.
+ mlog.warning(msg.format(l.name, 'name_prefix', l.name, pcfile))
+ return l.name
+
+ def _escape(self, value):
+ '''
+ We cannot use quote_arg because it quotes with ' and " which does not
+ work with pkg-config and pkgconf at all.
+ '''
+ # We should always write out paths with / because pkg-config requires
+ # spaces to be quoted with \ and that messes up on Windows:
+ # https://bugs.freedesktop.org/show_bug.cgi?id=103203
+ if isinstance(value, PurePath):
+ value = value.as_posix()
+ return value.replace(' ', r'\ ')
+
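+ # Sketch: _make_relative('/usr', '/usr/lib') returns 'lib'; a subdir
+ # not under the prefix is returned unchanged, as a POSIX-style string.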
+ def _make_relative(self, prefix, subdir):
+ prefix = PurePath(prefix)
+ subdir = PurePath(subdir)
+ try:
+ return subdir.relative_to(prefix).as_posix()
+ except ValueError:
+ return subdir.as_posix()
+
+ def _generate_pkgconfig_file(self, state, deps, subdirs, name, description,
+ url, version, pcfile, conflicts, variables,
+ unescaped_variables, uninstalled=False, dataonly=False):
+ coredata = state.environment.get_coredata()
+ if uninstalled:
+ outdir = os.path.join(state.environment.build_dir, 'meson-uninstalled')
+ if not os.path.exists(outdir):
+ os.mkdir(outdir)
+ prefix = PurePath(state.environment.get_build_dir())
+ srcdir = PurePath(state.environment.get_source_dir())
+ else:
+ outdir = state.environment.scratch_dir
+ prefix = PurePath(coredata.get_option(mesonlib.OptionKey('prefix')))
+ # These always return paths relative to prefix
+ libdir = PurePath(coredata.get_option(mesonlib.OptionKey('libdir')))
+ incdir = PurePath(coredata.get_option(mesonlib.OptionKey('includedir')))
+ fname = os.path.join(outdir, pcfile)
+ with open(fname, 'w', encoding='utf-8') as ofile:
+ if not dataonly:
+ ofile.write('prefix={}\n'.format(self._escape(prefix)))
+ if uninstalled:
+ ofile.write('srcdir={}\n'.format(self._escape(srcdir)))
+ ofile.write('libdir={}\n'.format(self._escape('${prefix}' / libdir)))
+ ofile.write('includedir={}\n'.format(self._escape('${prefix}' / incdir)))
+ if variables or unescaped_variables:
+ ofile.write('\n')
+ for k, v in variables:
+ ofile.write('{}={}\n'.format(k, self._escape(v)))
+ for k, v in unescaped_variables:
+ ofile.write(f'{k}={v}\n')
+ ofile.write('\n')
+ ofile.write('Name: %s\n' % name)
+ if len(description) > 0:
+ ofile.write('Description: %s\n' % description)
+ if len(url) > 0:
+ ofile.write('URL: %s\n' % url)
+ ofile.write('Version: %s\n' % version)
+ reqs_str = deps.format_reqs(deps.pub_reqs)
+ if len(reqs_str) > 0:
+ ofile.write(f'Requires: {reqs_str}\n')
+ reqs_str = deps.format_reqs(deps.priv_reqs)
+ if len(reqs_str) > 0:
+ ofile.write(f'Requires.private: {reqs_str}\n')
+ if len(conflicts) > 0:
+ ofile.write('Conflicts: {}\n'.format(' '.join(conflicts)))
+
+ def generate_libs_flags(libs):
+ msg = 'Library target {0!r} has {1!r} set. Compilers ' \
+ 'may not find it from its \'-l{2}\' linker flag in the ' \
+ '{3!r} pkg-config file.'
+ Lflags = []
+ for l in libs:
+ if isinstance(l, str):
+ yield l
+ else:
+ if uninstalled:
+ install_dir = os.path.dirname(state.backend.get_target_filename_abs(l))
+ else:
+ install_dir = l.get_custom_install_dir()[0]
+ if install_dir is False:
+ continue
+ is_custom_target = isinstance(l, (build.CustomTarget, build.CustomTargetIndex))
+ if not is_custom_target and 'cs' in l.compilers:
+ if isinstance(install_dir, str):
+ Lflag = '-r${{prefix}}/{}/{}'.format(self._escape(self._make_relative(prefix, install_dir)), l.filename)
+ else: # install_dir is True
+ Lflag = '-r${libdir}/%s' % l.filename
+ else:
+ if isinstance(install_dir, str):
+ Lflag = '-L${prefix}/%s' % self._escape(self._make_relative(prefix, install_dir))
+ else: # install_dir is True
+ Lflag = '-L${libdir}'
+ if Lflag not in Lflags:
+ Lflags.append(Lflag)
+ yield Lflag
+ lname = self._get_lname(l, msg, pcfile, is_custom_target)
+ # If using a custom suffix, the compiler may not be able to
+ # find the library
+ if not is_custom_target and l.name_suffix_set:
+ mlog.warning(msg.format(l.name, 'name_suffix', lname, pcfile))
+ if is_custom_target or 'cs' not in l.compilers:
+ yield '-l%s' % lname
+
+ def get_uninstalled_include_dirs(libs):
+ result = []
+ for l in libs:
+ if isinstance(l, (str, build.CustomTarget, build.CustomTargetIndex)):
+ continue
+ if l.get_subdir() not in result:
+ result.append(l.get_subdir())
+ for i in l.get_include_dirs():
+ curdir = i.get_curdir()
+ for d in i.get_incdirs():
+ path = os.path.join(curdir, d)
+ if path not in result:
+ result.append(path)
+ return result
+
+ def generate_uninstalled_cflags(libs):
+ for d in get_uninstalled_include_dirs(libs):
+ for basedir in ['${prefix}', '${srcdir}']:
+ path = PurePath(basedir, d)
+ yield '-I%s' % self._escape(path.as_posix())
+
+ if len(deps.pub_libs) > 0:
+ ofile.write('Libs: {}\n'.format(' '.join(generate_libs_flags(deps.pub_libs))))
+ if len(deps.priv_libs) > 0:
+ ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(deps.priv_libs))))
+
+ cflags = []
+ if uninstalled:
+ cflags += generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs)
+ else:
+ for d in subdirs:
+ if d == '.':
+ cflags.append('-I${includedir}')
+ else:
+ cflags.append(self._escape(PurePath('-I${includedir}') / d))
+ cflags += [self._escape(f) for f in deps.cflags]
+ if cflags and not dataonly:
+ ofile.write('Cflags: {}\n'.format(' '.join(cflags)))
+
+ @FeatureNewKwargs('pkgconfig.generate', '0.59.0', ['unescaped_variables', 'unescaped_uninstalled_variables'])
+ @FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['uninstalled_variables'])
+ @FeatureNewKwargs('pkgconfig.generate', '0.42.0', ['extra_cflags'])
+ @FeatureNewKwargs('pkgconfig.generate', '0.41.0', ['variables'])
+ @FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['dataonly'])
+ @permittedKwargs({'libraries', 'version', 'name', 'description', 'filebase',
+ 'subdirs', 'requires', 'requires_private', 'libraries_private',
+ 'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions',
+ 'dataonly', 'conflicts', 'uninstalled_variables',
+ 'unescaped_variables', 'unescaped_uninstalled_variables'})
+ def generate(self, state, args, kwargs):
+ default_version = state.project_version['version']
+ default_install_dir = None
+ default_description = None
+ default_name = None
+ mainlib = None
+ default_subdirs = ['.']
+ if not args and 'version' not in kwargs:
+ FeatureNew.single_use('pkgconfig.generate implicit version keyword', '0.46.0', state.subproject)
+ elif len(args) == 1:
+ FeatureNew.single_use('pkgconfig.generate optional positional argument', '0.46.0', state.subproject)
+ mainlib = args[0]
+ if not isinstance(mainlib, (build.StaticLibrary, build.SharedLibrary)):
+ raise mesonlib.MesonException('Pkgconfig_gen first positional argument must be a library object')
+ default_name = mainlib.name
+ default_description = state.project_name + ': ' + mainlib.name
+ install_dir = mainlib.get_custom_install_dir()[0]
+ if isinstance(install_dir, str):
+ default_install_dir = os.path.join(install_dir, 'pkgconfig')
+ elif len(args) > 1:
+ raise mesonlib.MesonException('Too many positional arguments passed to Pkgconfig_gen.')
+
+ dataonly = kwargs.get('dataonly', False)
+ if not isinstance(dataonly, bool):
+ raise mesonlib.MesonException('dataonly must be boolean.')
+ if dataonly:
+ default_subdirs = []
+ blocked_vars = ['libraries', 'libraries_private', 'requires_private', 'extra_cflags', 'subdirs']
+ if any(k in kwargs for k in blocked_vars):
+ raise mesonlib.MesonException(f'Cannot combine dataonly with any of {blocked_vars}')
+
+ subdirs = mesonlib.stringlistify(kwargs.get('subdirs', default_subdirs))
+ version = kwargs.get('version', default_version)
+ if not isinstance(version, str):
+ raise mesonlib.MesonException('Version must be specified.')
+ name = kwargs.get('name', default_name)
+ if not isinstance(name, str):
+ raise mesonlib.MesonException('Name not specified.')
+ filebase = kwargs.get('filebase', name)
+ if not isinstance(filebase, str):
+ raise mesonlib.MesonException('Filebase must be a string.')
+ description = kwargs.get('description', default_description)
+ if not isinstance(description, str):
+ raise mesonlib.MesonException('Description is not a string.')
+ url = kwargs.get('url', '')
+ if not isinstance(url, str):
+ raise mesonlib.MesonException('URL is not a string.')
+ conflicts = mesonlib.stringlistify(kwargs.get('conflicts', []))
+
+ # Prepend the main library to the public libraries list. This is
+ # required so deps.add_pub_libs() can handle dependency ordering
+ # correctly and put extra libraries after the main library.
+ libraries = mesonlib.extract_as_list(kwargs, 'libraries')
+ if mainlib:
+ libraries = [mainlib] + libraries
+
+ deps = DependenciesHelper(state, filebase)
+ deps.add_pub_libs(libraries)
+ deps.add_priv_libs(kwargs.get('libraries_private', []))
+ deps.add_pub_reqs(kwargs.get('requires', []))
+ deps.add_priv_reqs(kwargs.get('requires_private', []))
+ deps.add_cflags(kwargs.get('extra_cflags', []))
+
+ dversions = kwargs.get('d_module_versions', None)
+ if dversions:
+ compiler = state.environment.coredata.compilers.host.get('d')
+ if compiler:
+ deps.add_cflags(compiler.get_feature_args({'versions': dversions}, None))
+
+ deps.remove_dups()
+
+ def parse_variable_list(vardict):
+ reserved = ['prefix', 'libdir', 'includedir']
+ variables = []
+ for name, value in vardict.items():
+ if not dataonly and name in reserved:
+ raise mesonlib.MesonException(f'Variable "{name}" is reserved')
+ variables.append((name, value))
+ return variables
+
+ variables = self.interpreter.extract_variables(kwargs, dict_new=True)
+ variables = parse_variable_list(variables)
+ unescaped_variables = self.interpreter.extract_variables(kwargs, argname='unescaped_variables')
+ unescaped_variables = parse_variable_list(unescaped_variables)
+
+ pcfile = filebase + '.pc'
+ pkgroot = kwargs.get('install_dir', default_install_dir)
+ if pkgroot is None:
+ if mesonlib.is_freebsd():
+ pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('prefix')), 'libdata', 'pkgconfig')
+ else:
+ pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'pkgconfig')
+ if not isinstance(pkgroot, str):
+ raise mesonlib.MesonException('Install_dir must be a string.')
+ self._generate_pkgconfig_file(state, deps, subdirs, name, description, url,
+ version, pcfile, conflicts, variables,
+ unescaped_variables, False, dataonly)
+ res = build.Data([mesonlib.File(True, state.environment.get_scratch_dir(), pcfile)], pkgroot, None, state.subproject)
+ variables = self.interpreter.extract_variables(kwargs, argname='uninstalled_variables', dict_new=True)
+ variables = parse_variable_list(variables)
+ unescaped_variables = self.interpreter.extract_variables(kwargs, argname='unescaped_uninstalled_variables')
+ unescaped_variables = parse_variable_list(unescaped_variables)
+
+ pcfile = filebase + '-uninstalled.pc'
+ self._generate_pkgconfig_file(state, deps, subdirs, name, description, url,
+ version, pcfile, conflicts, variables,
+ unescaped_variables, uninstalled=True, dataonly=dataonly)
+ # Associate the main library with this generated pc file. If the library
+ # is used in any subsequent call to generate(), it will be emitted as a
+ # 'Requires:' or 'Requires.private:' entry instead.
+ # Backward compatibility: We used to set 'generated_pc' on all public
+ # libraries instead of just the main one. Keep doing that but warn if
+ # anyone is relying on that deprecated behaviour.
+ if mainlib:
+ if not hasattr(mainlib, 'generated_pc'):
+ mainlib.generated_pc = filebase
+ else:
+ mlog.warning('Already generated a pkg-config file for', mlog.bold(mainlib.name))
+ else:
+ for lib in deps.pub_libs:
+ if not isinstance(lib, str) and not hasattr(lib, 'generated_pc'):
+ lib.generated_pc = filebase
+ location = state.current_node
+ lib.generated_pc_warn = [name, location]
+ return ModuleReturnValue(res, [res])
+
+def initialize(*args, **kwargs):
+ return PkgConfigModule(*args, **kwargs)
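+
+# Example usage from a meson.build file (a sketch; 'mylib' is a hypothetical
+# library target defined elsewhere):
+#   pkg = import('pkgconfig')
+#   pkg.generate(mylib,
+#                description: 'A demo library',
+#                subdirs: 'mylib',
+#                requires: 'glib-2.0 >= 2.50')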
diff --git a/meson/mesonbuild/modules/python.py b/meson/mesonbuild/modules/python.py
new file mode 100644
index 000000000..c4ebbc77b
--- /dev/null
+++ b/meson/mesonbuild/modules/python.py
@@ -0,0 +1,661 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import json
+import shutil
+import typing as T
+
+from pathlib import Path
+from .. import mesonlib
+from ..mesonlib import MachineChoice, MesonException
+from . import ExtensionModule
+from ..interpreterbase import (
+ noPosargs, noKwargs, permittedKwargs,
+ InvalidArguments,
+ FeatureNew, FeatureNewKwargs, disablerIfNotFound
+)
+from ..interpreter import ExternalProgramHolder, extract_required_kwarg, permitted_dependency_kwargs
+from ..build import known_shmod_kwargs
+from .. import mlog
+from ..environment import detect_cpu_family
+from ..dependencies import DependencyMethods, PkgConfigDependency, NotFoundDependency, SystemDependency
+from ..programs import ExternalProgram, NonExistingExternalProgram
+
+mod_kwargs = {'subdir'}
+mod_kwargs.update(known_shmod_kwargs)
+mod_kwargs -= {'name_prefix', 'name_suffix'}
+
+class PythonDependency(SystemDependency):
+
+ def __init__(self, python_holder, environment, kwargs):
+ super().__init__('python', environment, kwargs)
+ self.name = 'python'
+ self.static = kwargs.get('static', False)
+ self.embed = kwargs.get('embed', False)
+ self.version = python_holder.version
+ self.platform = python_holder.platform
+ self.pkgdep = None
+ self.variables = python_holder.variables
+ self.paths = python_holder.paths
+ self.link_libpython = python_holder.link_libpython
+ self.info: T.Optional[T.Dict[str, str]] = None
+ if mesonlib.version_compare(self.version, '>= 3.0'):
+ self.major_version = 3
+ else:
+ self.major_version = 2
+
+ # We first try to find the necessary python variables using pkgconfig
+ if DependencyMethods.PKGCONFIG in self.methods and not python_holder.is_pypy:
+ pkg_version = self.variables.get('LDVERSION') or self.version
+ pkg_libdir = self.variables.get('LIBPC')
+ pkg_embed = '-embed' if self.embed and mesonlib.version_compare(self.version, '>=3.8') else ''
+ pkg_name = f'python-{pkg_version}{pkg_embed}'
+
+ # If python-X.Y.pc exists in LIBPC, we will try to use it
+ if pkg_libdir is not None and Path(os.path.join(pkg_libdir, f'{pkg_name}.pc')).is_file():
+ old_pkg_libdir = os.environ.get('PKG_CONFIG_LIBDIR')
+ old_pkg_path = os.environ.get('PKG_CONFIG_PATH')
+
+ os.environ.pop('PKG_CONFIG_PATH', None)
+
+ if pkg_libdir:
+ os.environ['PKG_CONFIG_LIBDIR'] = pkg_libdir
+
+ try:
+ self.pkgdep = PkgConfigDependency(pkg_name, environment, kwargs)
+ mlog.debug(f'Found "{pkg_name}" via pkgconfig lookup in LIBPC ({pkg_libdir})')
+ py_lookup_method = 'pkgconfig'
+ except MesonException as e:
+ mlog.debug(f'"{pkg_name}" could not be found in LIBPC ({pkg_libdir})')
+ mlog.debug(e)
+
+ if old_pkg_path is not None:
+ os.environ['PKG_CONFIG_PATH'] = old_pkg_path
+
+ if old_pkg_libdir is not None:
+ os.environ['PKG_CONFIG_LIBDIR'] = old_pkg_libdir
+ else:
+ os.environ.pop('PKG_CONFIG_LIBDIR', None)
+ else:
+ mlog.debug(f'"{pkg_name}" could not be found in LIBPC ({pkg_libdir}), this is likely due to a relocated python installation')
+
+ # If lookup via LIBPC failed, try to use fallback PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH mechanisms
+ if self.pkgdep is None or not self.pkgdep.found():
+ try:
+ self.pkgdep = PkgConfigDependency(pkg_name, environment, kwargs)
+ mlog.debug(f'Found "{pkg_name}" via fallback pkgconfig lookup in PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH')
+ py_lookup_method = 'pkgconfig-fallback'
+ except MesonException as e:
+ mlog.debug(f'"{pkg_name}" could not be found via fallback pkgconfig lookup in PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH')
+ mlog.debug(e)
+
+ if self.pkgdep and self.pkgdep.found():
+ self.compile_args = self.pkgdep.get_compile_args()
+ self.link_args = self.pkgdep.get_link_args()
+ self.is_found = True
+ self.pcdep = self.pkgdep
+ else:
+ self.pkgdep = None
+
+ # Finally, try to find python via SYSCONFIG as a last resort
+ if DependencyMethods.SYSCONFIG in self.methods:
+ if mesonlib.is_windows():
+ self._find_libpy_windows(environment)
+ else:
+ self._find_libpy(python_holder, environment)
+ if self.is_found:
+ mlog.debug(f'Found "python-{self.version}" via SYSCONFIG module')
+ py_lookup_method = 'sysconfig'
+
+ if self.is_found:
+ mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green(f'YES ({py_lookup_method})'))
+ else:
+ mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.red('NO'))
+
+ def _find_libpy(self, python_holder, environment):
+ if python_holder.is_pypy:
+ if self.major_version == 3:
+ libname = 'pypy3-c'
+ else:
+ libname = 'pypy-c'
+ libdir = os.path.join(self.variables.get('base'), 'bin')
+ libdirs = [libdir]
+ else:
+ libname = f'python{self.version}'
+ if 'DEBUG_EXT' in self.variables:
+ libname += self.variables['DEBUG_EXT']
+ if 'ABIFLAGS' in self.variables:
+ libname += self.variables['ABIFLAGS']
+ libdirs = []
+
+ largs = self.clib_compiler.find_library(libname, environment, libdirs)
+ if largs is not None:
+ self.link_args = largs
+
+ self.is_found = largs is not None or self.link_libpython
+
+ inc_paths = mesonlib.OrderedSet([
+ self.variables.get('INCLUDEPY'),
+ self.paths.get('include'),
+ self.paths.get('platinclude')])
+
+ self.compile_args += ['-I' + path for path in inc_paths if path]
+
+ def get_windows_python_arch(self):
+ if self.platform == 'mingw':
+ pycc = self.variables.get('CC')
+ if pycc.startswith('x86_64'):
+ return '64'
+ elif pycc.startswith(('i686', 'i386')):
+ return '32'
+ else:
+ mlog.log('MinGW Python built with unknown CC {!r}, please file '
+ 'a bug'.format(pycc))
+ return None
+ elif self.platform == 'win32':
+ return '32'
+ elif self.platform in ('win64', 'win-amd64'):
+ return '64'
+ mlog.log(f'Unknown Windows Python platform {self.platform!r}')
+ return None
+
+ def get_windows_link_args(self):
+ if self.platform.startswith('win'):
+ vernum = self.variables.get('py_version_nodot')
+ if self.static:
+ libpath = Path('libs') / f'libpython{vernum}.a'
+ else:
+ comp = self.get_compiler()
+ if comp.id == "gcc":
+ libpath = f'python{vernum}.dll'
+ else:
+ libpath = Path('libs') / f'python{vernum}.lib'
+ lib = Path(self.variables.get('base')) / libpath
+ elif self.platform == 'mingw':
+ if self.static:
+ libname = self.variables.get('LIBRARY')
+ else:
+ libname = self.variables.get('LDLIBRARY')
+ lib = Path(self.variables.get('LIBDIR')) / libname
+ if not lib.exists():
+ mlog.log('Could not find Python3 library {!r}'.format(str(lib)))
+ return None
+ return [str(lib)]
+
+ def _find_libpy_windows(self, env):
+ '''
+ Find python3 libraries on Windows and also verify that the arch matches
+ what we are building for.
+ '''
+ pyarch = self.get_windows_python_arch()
+ if pyarch is None:
+ self.is_found = False
+ return
+ arch = detect_cpu_family(env.coredata.compilers.host)
+ if arch == 'x86':
+ arch = '32'
+ elif arch == 'x86_64':
+ arch = '64'
+ else:
+ # We can't cross-compile Python 3 dependencies on Windows yet
+ mlog.log(f'Unknown architecture {arch!r} for',
+ mlog.bold(self.name))
+ self.is_found = False
+ return
+ # Pyarch ends in '32' or '64'
+ if arch != pyarch:
+ mlog.log('Need', mlog.bold(self.name), 'for {}-bit, but '
+ 'found {}-bit'.format(arch, pyarch))
+ self.is_found = False
+ return
+ # This can fail if the library is not found
+ largs = self.get_windows_link_args()
+ if largs is None:
+ self.is_found = False
+ return
+ self.link_args = largs
+ # Compile args
+ inc_paths = mesonlib.OrderedSet([
+ self.variables.get('INCLUDEPY'),
+ self.paths.get('include'),
+ self.paths.get('platinclude')])
+
+ self.compile_args += ['-I' + path for path in inc_paths if path]
+
+ # https://sourceforge.net/p/mingw-w64/mailman/message/30504611/
+ if pyarch == '64' and self.major_version == 2:
+ self.compile_args += ['-DMS_WIN64']
+
+ self.is_found = True
+
+ @staticmethod
+ def get_methods():
+ if mesonlib.is_windows():
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG]
+ elif mesonlib.is_osx():
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.EXTRAFRAMEWORK]
+ else:
+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG]
+
+ def get_pkgconfig_variable(self, variable_name, kwargs):
+ if self.pkgdep:
+ return self.pkgdep.get_pkgconfig_variable(variable_name, kwargs)
+ else:
+ return super().get_pkgconfig_variable(variable_name, kwargs)
+
+
+INTROSPECT_COMMAND = '''import sysconfig
+import json
+import sys
+
+install_paths = sysconfig.get_paths(scheme='posix_prefix', vars={'base': '', 'platbase': '', 'installed_base': ''})
+
+def links_against_libpython():
+ from distutils.core import Distribution, Extension
+ cmd = Distribution().get_command_obj('build_ext')
+ cmd.ensure_finalized()
+ return bool(cmd.get_libraries(Extension('dummy', [])))
+
+print (json.dumps ({
+ 'variables': sysconfig.get_config_vars(),
+ 'paths': sysconfig.get_paths(),
+ 'install_paths': install_paths,
+ 'sys_paths': sys.path,
+ 'version': sysconfig.get_python_version(),
+ 'platform': sysconfig.get_platform(),
+ 'is_pypy': '__pypy__' in sys.builtin_module_names,
+ 'link_libpython': links_against_libpython(),
+}))
+'''
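+# A sketch of the JSON the command above prints (values vary by interpreter):
+#   {"variables": {...}, "paths": {...}, "install_paths": {...},
+#    "sys_paths": [...], "version": "3.9", "platform": "linux-x86_64",
+#    "is_pypy": false, "link_libpython": true}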
+
+if T.TYPE_CHECKING:
+ # TypedDict lives in typing_extensions for the Python versions Meson
+ # supports; importing it here keeps it a type-checking-only dependency.
+ from typing_extensions import TypedDict
+
+ class PythonIntrospectionDict(TypedDict):
+
+ install_paths: T.Dict[str, str]
+ is_pypy: bool
+ link_libpython: bool
+ paths: T.Dict[str, str]
+ platform: str
+ suffix: str
+ sys_paths: T.List[str]
+ variables: T.Dict[str, str]
+ version: str
+
+class PythonExternalProgram(ExternalProgram):
+ def __init__(self, name: str, command: T.Optional[T.List[str]] = None, ext_prog: T.Optional[ExternalProgram] = None):
+ if ext_prog is None:
+ super().__init__(name, command=command, silent=True)
+ else:
+ self.name = name
+ self.command = ext_prog.command
+ self.path = ext_prog.path
+ self.info: 'PythonIntrospectionDict' = {
+ 'install_paths': {},
+ 'is_pypy': False,
+ 'link_libpython': False,
+ 'paths': {},
+ 'platform': 'sentinel',
+ 'sys_paths': [],
+ 'variables': {},
+ 'version': '0.0',
+ }
+
+ def _check_version(self, version: str) -> bool:
+ if self.name == 'python2':
+ return mesonlib.version_compare(version, '< 3.0')
+ elif self.name == 'python3':
+ return mesonlib.version_compare(version, '>= 3.0')
+ return True
+
+ def sanity(self) -> bool:
+ # Sanity check, we expect to have something that at least quacks in tune
+ cmd = self.get_command() + ['-c', INTROSPECT_COMMAND]
+ p, stdout, stderr = mesonlib.Popen_safe(cmd)
+ try:
+ info = json.loads(stdout)
+ except json.JSONDecodeError:
+ info = None
+ mlog.debug('Could not introspect Python (%s): exit code %d' % (str(p.args), p.returncode))
+ mlog.debug('Program stdout:\n')
+ mlog.debug(stdout)
+ mlog.debug('Program stderr:\n')
+ mlog.debug(stderr)
+
+ if info is not None and self._check_version(info['version']):
+ variables = info['variables']
+ info['suffix'] = variables.get('EXT_SUFFIX') or variables.get('SO') or '.so'
+ self.info = T.cast('PythonIntrospectionDict', info)
+ self.platlib = self._get_path('platlib')
+ self.purelib = self._get_path('purelib')
+ return True
+ else:
+ return False
+
+ def _get_path(self, key: str) -> str:
+ user_dir = str(Path.home())
+ sys_paths = self.info['sys_paths']
+ rel_path = self.info['install_paths'][key][1:]
+ if not any(p.endswith(rel_path) for p in sys_paths if not p.startswith(user_dir)):
+ # On Debian derivatives the sysconfig install path is broken and is
+ # not among the locations python actually searches.
+ # See https://github.com/mesonbuild/meson/issues/8739.
+ mlog.warning('Broken python installation detected. Python files',
+ 'installed by Meson might not be found by python interpreter.',
+ once=True)
+ if mesonlib.is_debianlike():
+ rel_path = 'lib/python3/dist-packages'
+ return rel_path
+
+
+class PythonInstallation(ExternalProgramHolder):
+ def __init__(self, python, interpreter):
+ ExternalProgramHolder.__init__(self, python, interpreter)
+ info = python.info
+ prefix = self.interpreter.environment.coredata.get_option(mesonlib.OptionKey('prefix'))
+ self.variables = info['variables']
+ self.suffix = info['suffix']
+ self.paths = info['paths']
+ self.platlib_install_path = os.path.join(prefix, python.platlib)
+ self.purelib_install_path = os.path.join(prefix, python.purelib)
+ self.version = info['version']
+ self.platform = info['platform']
+ self.is_pypy = info['is_pypy']
+ self.link_libpython = info['link_libpython']
+ self.methods.update({
+ 'extension_module': self.extension_module_method,
+ 'dependency': self.dependency_method,
+ 'install_sources': self.install_sources_method,
+ 'get_install_dir': self.get_install_dir_method,
+ 'language_version': self.language_version_method,
+ 'found': self.found_method,
+ 'has_path': self.has_path_method,
+ 'get_path': self.get_path_method,
+ 'has_variable': self.has_variable_method,
+ 'get_variable': self.get_variable_method,
+ 'path': self.path_method,
+ })
+
+ @permittedKwargs(mod_kwargs)
+ def extension_module_method(self, args, kwargs):
+ if 'install_dir' in kwargs:
+ if 'subdir' in kwargs:
+ raise InvalidArguments('"subdir" and "install_dir" are mutually exclusive')
+ else:
+ subdir = kwargs.pop('subdir', '')
+ if not isinstance(subdir, str):
+ raise InvalidArguments('"subdir" argument must be a string.')
+
+ kwargs['install_dir'] = os.path.join(self.platlib_install_path, subdir)
+
+ # On macOS and some Linux distros (Debian) distutils doesn't link
+ # extensions against libpython. We call into distutils and mirror its
+ # behavior. See https://github.com/mesonbuild/meson/issues/4117
+ if not self.link_libpython:
+ new_deps = []
+ for dep in mesonlib.extract_as_list(kwargs, 'dependencies'):
+ if isinstance(dep, PythonDependency):
+ dep = dep.get_partial_dependency(compile_args=True)
+ new_deps.append(dep)
+ kwargs['dependencies'] = new_deps
+
+ # msys2's python3 has an extension suffix like '-cpython-36m.dll'.
+ # Split it at the last '.': the stem is appended to the target name
+ # below and only the trailing extension is used as the name_suffix.
+ split, suffix = self.suffix.rsplit('.', 1)
+ args[0] += split
+
+ kwargs['name_prefix'] = ''
+ kwargs['name_suffix'] = suffix
+
+ return self.interpreter.func_shared_module(None, args, kwargs)
+
+ @permittedKwargs(permitted_dependency_kwargs | {'embed'})
+ @FeatureNewKwargs('python_installation.dependency', '0.53.0', ['embed'])
+ def dependency_method(self, args, kwargs):
+ if args:
+ mlog.warning('python_installation.dependency() does not take any '
+ 'positional arguments. It always returns a Python '
+ 'dependency. This will become an error in the future.',
+ location=self.interpreter.current_node)
+ disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+ if disabled:
+ mlog.log('Dependency', mlog.bold('python'), 'skipped: feature', mlog.bold(feature), 'disabled')
+ dep = NotFoundDependency(self.interpreter.environment)
+ else:
+ dep = PythonDependency(self, self.interpreter.environment, kwargs)
+ if required and not dep.found():
+ raise mesonlib.MesonException('Python dependency not found')
+ return dep
+
+ @permittedKwargs(['pure', 'subdir'])
+ def install_sources_method(self, args, kwargs):
+ pure = kwargs.pop('pure', True)
+ if not isinstance(pure, bool):
+ raise InvalidArguments('"pure" argument must be a boolean.')
+
+ subdir = kwargs.pop('subdir', '')
+ if not isinstance(subdir, str):
+ raise InvalidArguments('"subdir" argument must be a string.')
+
+ if pure:
+ kwargs['install_dir'] = os.path.join(self.purelib_install_path, subdir)
+ else:
+ kwargs['install_dir'] = os.path.join(self.platlib_install_path, subdir)
+
+ return self.interpreter.func_install_data(None, args, kwargs)
+
+ @noPosargs
+ @permittedKwargs(['pure', 'subdir'])
+ def get_install_dir_method(self, args, kwargs):
+ pure = kwargs.pop('pure', True)
+ if not isinstance(pure, bool):
+ raise InvalidArguments('"pure" argument must be a boolean.')
+
+ subdir = kwargs.pop('subdir', '')
+ if not isinstance(subdir, str):
+ raise InvalidArguments('"subdir" argument must be a string.')
+
+ if pure:
+ res = os.path.join(self.purelib_install_path, subdir)
+ else:
+ res = os.path.join(self.platlib_install_path, subdir)
+
+ return res
+
+ @noPosargs
+ @noKwargs
+ def language_version_method(self, args, kwargs):
+ return self.version
+
+ @noKwargs
+ def has_path_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InvalidArguments('has_path takes exactly one positional argument.')
+ path_name = args[0]
+ if not isinstance(path_name, str):
+ raise InvalidArguments('has_path argument must be a string.')
+
+ return path_name in self.paths
+
+ @noKwargs
+ def get_path_method(self, args, kwargs):
+ if len(args) not in (1, 2):
+ raise InvalidArguments('get_path must have one or two arguments.')
+ path_name = args[0]
+ if not isinstance(path_name, str):
+ raise InvalidArguments('get_path argument must be a string.')
+
+ try:
+ path = self.paths[path_name]
+ except KeyError:
+ if len(args) == 2:
+ path = args[1]
+ else:
+ raise InvalidArguments(f'{path_name} is not a valid path name')
+
+ return path
+
+ @noKwargs
+ def has_variable_method(self, args, kwargs):
+ if len(args) != 1:
+ raise InvalidArguments('has_variable takes exactly one positional argument.')
+ var_name = args[0]
+ if not isinstance(var_name, str):
+ raise InvalidArguments('has_variable argument must be a string.')
+
+ return var_name in self.variables
+
+ @noKwargs
+ def get_variable_method(self, args, kwargs):
+ if len(args) not in (1, 2):
+ raise InvalidArguments('get_variable must have one or two arguments.')
+ var_name = args[0]
+ if not isinstance(var_name, str):
+ raise InvalidArguments('get_variable argument must be a string.')
+
+ try:
+ var = self.variables[var_name]
+ except KeyError:
+ if len(args) == 2:
+ var = args[1]
+ else:
+ raise InvalidArguments(f'{var_name} is not a valid variable name')
+
+ return var
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('Python module path method', '0.50.0')
+ def path_method(self, args, kwargs):
+ return super().path_method(args, kwargs)
+
+
+class PythonModule(ExtensionModule):
+
+ @FeatureNew('Python Module', '0.46.0')
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.methods.update({
+ 'find_installation': self.find_installation,
+ })
+
+ # https://www.python.org/dev/peps/pep-0397/
+ def _get_win_pythonpath(self, name_or_path):
+ if name_or_path not in ['python2', 'python3']:
+ return None
+ if not shutil.which('py'):
+ # program not installed, return without an exception
+ return None
+ ver = {'python2': '-2', 'python3': '-3'}[name_or_path]
+ cmd = ['py', ver, '-c', "import sysconfig; print(sysconfig.get_config_var('BINDIR'))"]
+ _, stdout, _ = mesonlib.Popen_safe(cmd)
+ directory = stdout.strip()
+ if os.path.exists(directory):
+ return os.path.join(directory, 'python')
+ else:
+ return None
+
+ @FeatureNewKwargs('python.find_installation', '0.49.0', ['disabler'])
+ @FeatureNewKwargs('python.find_installation', '0.51.0', ['modules'])
+ @disablerIfNotFound
+ @permittedKwargs({'required', 'modules'})
+ def find_installation(self, state, args, kwargs):
+ feature_check = FeatureNew('Passing "feature" option to find_installation', '0.48.0')
+ disabled, required, feature = extract_required_kwarg(kwargs, state.subproject, feature_check)
+ want_modules = mesonlib.extract_as_list(kwargs, 'modules') # type: T.List[str]
+ found_modules = [] # type: T.List[str]
+ missing_modules = [] # type: T.List[str]
+ fallback = args[0] if args else ''
+ display_name = fallback or 'python'
+
+ if len(args) > 1:
+ raise InvalidArguments('find_installation takes zero or one positional argument.')
+
+ name_or_path = state.environment.lookup_binary_entry(MachineChoice.HOST, 'python')
+ if name_or_path is None and args:
+ name_or_path = fallback
+ if not isinstance(name_or_path, str):
+ raise InvalidArguments('find_installation argument must be a string.')
+
+ if disabled:
+ mlog.log('Program', name_or_path or 'python', 'found:', mlog.red('NO'), '(disabled by:', mlog.bold(feature), ')')
+ return NonExistingExternalProgram()
+
+ if not name_or_path:
+ python = PythonExternalProgram('python3', mesonlib.python_command)
+ else:
+ tmp_python = ExternalProgram.from_entry(display_name, name_or_path)
+ python = PythonExternalProgram(display_name, ext_prog=tmp_python)
+
+ if not python.found() and mesonlib.is_windows():
+ pythonpath = self._get_win_pythonpath(name_or_path)
+ if pythonpath is not None:
+ name_or_path = pythonpath
+ python = PythonExternalProgram(name_or_path)
+
+ # Last ditch effort, python2 or python3 can be named python
+ # on various platforms, let's not give up just yet, if an executable
+ # named python is available and has a compatible version, let's use
+ # it
+ if not python.found() and name_or_path in ['python2', 'python3']:
+ python = PythonExternalProgram('python')
+
+ if python.found() and want_modules:
+ for mod in want_modules:
+ p, out, err = mesonlib.Popen_safe(
+ python.command +
+ ['-c', f'import {mod}'])
+ if p.returncode != 0:
+ missing_modules.append(mod)
+ else:
+ found_modules.append(mod)
+
+ msg = ['Program', python.name]
+ if want_modules:
+ msg.append('({})'.format(', '.join(want_modules)))
+ msg.append('found:')
+ if python.found() and not missing_modules:
+ msg.extend([mlog.green('YES'), '({})'.format(' '.join(python.command))])
+ else:
+ msg.append(mlog.red('NO'))
+ if found_modules:
+ msg.append('modules:')
+ msg.append(', '.join(found_modules))
+
+ mlog.log(*msg)
+
+ if not python.found():
+ if required:
+ raise mesonlib.MesonException('{} not found'.format(name_or_path or 'python'))
+ return NonExistingExternalProgram()
+ elif missing_modules:
+ if required:
+ raise mesonlib.MesonException('{} is missing modules: {}'.format(name_or_path or 'python', ', '.join(missing_modules)))
+ return NonExistingExternalProgram()
+ else:
+ sane = python.sanity()
+
+ if sane:
+ return python
+ else:
+ if required:
+ raise mesonlib.MesonException(f'{python} is not a valid python or it is missing distutils')
+ return NonExistingExternalProgram()
+
+ raise mesonlib.MesonBugException('Unreachable code was reached (PythonModule.find_installation).')
+
+
+def initialize(*args, **kwargs):
+ mod = PythonModule(*args, **kwargs)
+ mod.interpreter.append_holder_map(PythonExternalProgram, PythonInstallation)
+ return mod
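+
+# Example usage from a meson.build file (a sketch; 'spam.c' is hypothetical):
+#   pymod = import('python')
+#   py = pymod.find_installation('python3', modules: ['setuptools'])
+#   py.extension_module('spam', 'spam.c', dependencies: py.dependency())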
diff --git a/meson/mesonbuild/modules/python3.py b/meson/mesonbuild/modules/python3.py
new file mode 100644
index 000000000..dc1f7c775
--- /dev/null
+++ b/meson/mesonbuild/modules/python3.py
@@ -0,0 +1,81 @@
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sysconfig
+from .. import mesonlib
+
+from . import ExtensionModule
+from ..interpreterbase import noKwargs, permittedKwargs, FeatureDeprecated
+from ..build import known_shmod_kwargs
+from ..programs import ExternalProgram
+
+
+class Python3Module(ExtensionModule):
+ @FeatureDeprecated('python3 module', '0.48.0')
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.methods.update({
+ 'extension_module': self.extension_module,
+ 'find_python': self.find_python,
+ 'language_version': self.language_version,
+ 'sysconfig_path': self.sysconfig_path,
+ })
+
+ @permittedKwargs(known_shmod_kwargs)
+ def extension_module(self, state, args, kwargs):
+ if 'name_prefix' in kwargs:
+ raise mesonlib.MesonException('Name_prefix is set automatically, specifying it is forbidden.')
+ if 'name_suffix' in kwargs:
+ raise mesonlib.MesonException('Name_suffix is set automatically, specifying it is forbidden.')
+ host_system = state.host_machine.system
+ if host_system == 'darwin':
+ # Default suffix is 'dylib' but Python does not use it for extensions.
+ suffix = 'so'
+ elif host_system == 'windows':
+ # On Windows the extension is pyd for some inexplicable reason.
+ suffix = 'pyd'
+ else:
+ suffix = []
+ kwargs['name_prefix'] = ''
+ kwargs['name_suffix'] = suffix
+ return self.interpreter.func_shared_module(None, args, kwargs)
+
+ @noKwargs
+ def find_python(self, state, args, kwargs):
+ command = state.environment.lookup_binary_entry(mesonlib.MachineChoice.HOST, 'python3')
+ if command is not None:
+ py3 = ExternalProgram.from_entry('python3', command)
+ else:
+ py3 = ExternalProgram('python3', mesonlib.python_command, silent=True)
+ return py3
+
+ @noKwargs
+ def language_version(self, state, args, kwargs):
+ return sysconfig.get_python_version()
+
+ @noKwargs
+ def sysconfig_path(self, state, args, kwargs):
+ if len(args) != 1:
+ raise mesonlib.MesonException('sysconfig_path() requires passing the name of the path to get.')
+ path_name = args[0]
+ valid_names = sysconfig.get_path_names()
+ if path_name not in valid_names:
+ raise mesonlib.MesonException(f'{path_name} is not a valid path name {valid_names}.')
+
+ # Get a relative path without a prefix, e.g. lib/python3.6/site-packages
+ return sysconfig.get_path(path_name, vars={'base': '', 'platbase': '', 'installed_base': ''})[1:]
+
+
+def initialize(*args, **kwargs):
+ return Python3Module(*args, **kwargs)
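+
+# Example usage from a meson.build file (a sketch; note this module is
+# deprecated since 0.48.0 in favour of the 'python' module above):
+#   py3mod = import('python3')
+#   py3 = py3mod.find_python()
+#   message(py3mod.language_version())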
diff --git a/meson/mesonbuild/modules/qt.py b/meson/mesonbuild/modules/qt.py
new file mode 100644
index 000000000..207a02564
--- /dev/null
+++ b/meson/mesonbuild/modules/qt.py
@@ -0,0 +1,524 @@
+# Copyright 2015 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mesonbuild import coredata
+import os
+import shutil
+import typing as T
+import xml.etree.ElementTree as ET
+
+from . import ModuleReturnValue, ExtensionModule
+from .. import build
+from .. import mlog
+from ..dependencies import find_external_dependency, Dependency, ExternalLibrary
+from ..mesonlib import MesonException, File, FileOrString, version_compare, Popen_safe
+from ..interpreter import extract_required_kwarg
+from ..interpreterbase import ContainerTypeInfo, FeatureDeprecated, KwargInfo, noPosargs, FeatureNew, typed_kwargs
+from ..programs import ExternalProgram, NonExistingExternalProgram
+
+if T.TYPE_CHECKING:
+ from . import ModuleState
+ from ..dependencies.qt import QtPkgConfigDependency, QmakeQtDependency
+ from ..interpreter import Interpreter
+ from ..interpreter import kwargs
+
+ QtDependencyType = T.Union[QtPkgConfigDependency, QmakeQtDependency]
+
+ from typing_extensions import TypedDict
+
+ class ResourceCompilerKwArgs(TypedDict):
+
+ """Keyword arguments for the Resource Compiler method."""
+
+ name: T.Optional[str]
+ sources: T.List[FileOrString]
+ extra_args: T.List[str]
+ method: str
+
+ class UICompilerKwArgs(TypedDict):
+
+ """Keyword arguments for the Ui Compiler method."""
+
+ sources: T.Sequence[T.Union[FileOrString, build.CustomTarget]]
+ extra_args: T.List[str]
+ method: str
+
+ class MocCompilerKwArgs(TypedDict):
+
+ """Keyword arguments for the Moc Compiler method."""
+
+ sources: T.List[T.Union[FileOrString, build.CustomTarget]]
+ headers: T.List[T.Union[FileOrString, build.CustomTarget]]
+ extra_args: T.List[str]
+ method: str
+ include_directories: T.List[T.Union[str, build.IncludeDirs]]
+ dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
+
+ class PreprocessKwArgs(TypedDict):
+
+ sources: T.List[FileOrString]
+ moc_sources: T.List[T.Union[FileOrString, build.CustomTarget]]
+ moc_headers: T.List[T.Union[FileOrString, build.CustomTarget]]
+ qresources: T.List[FileOrString]
+ ui_files: T.List[T.Union[FileOrString, build.CustomTarget]]
+ moc_extra_arguments: T.List[str]
+ rcc_extra_arguments: T.List[str]
+ uic_extra_arguments: T.List[str]
+ include_directories: T.List[T.Union[str, build.IncludeDirs]]
+ dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
+ method: str
+
+ class HasToolKwArgs(kwargs.ExtractRequired):
+
+ method: str
+
+ class CompileTranslationsKwArgs(TypedDict):
+
+ build_by_default: bool
+ install: bool
+ install_dir: T.Optional[str]
+ method: str
+ qresource: T.Optional[str]
+ rcc_extra_arguments: T.List[str]
+ ts_files: T.List[str]
+
+
+class QtBaseModule(ExtensionModule):
+ _tools_detected = False
+ _rcc_supports_depfiles = False
+
+ def __init__(self, interpreter: 'Interpreter', qt_version: int = 5):
+ ExtensionModule.__init__(self, interpreter)
+ self.qt_version = qt_version
+ self.tools: T.Dict[str, ExternalProgram] = {
+ 'moc': NonExistingExternalProgram('moc'),
+ 'uic': NonExistingExternalProgram('uic'),
+ 'rcc': NonExistingExternalProgram('rcc'),
+ 'lrelease': NonExistingExternalProgram('lrelease'),
+ }
+ self.methods.update({
+ 'has_tools': self.has_tools,
+ 'preprocess': self.preprocess,
+ 'compile_translations': self.compile_translations,
+ 'compile_resources': self.compile_resources,
+ 'compile_ui': self.compile_ui,
+ 'compile_moc': self.compile_moc,
+ })
+
+ def compilers_detect(self, state: 'ModuleState', qt_dep: 'QtDependencyType') -> None:
+ """Detect Qt (4 or 5) moc, uic, rcc in the specified bindir or in PATH"""
+        # It is important that this list does not change order, as the order
+        # of the returned ExternalPrograms would change as well.
+ wanted = f'== {qt_dep.version}'
+
+ def gen_bins() -> T.Generator[T.Tuple[str, str], None, None]:
+ for b in self.tools:
+ if qt_dep.bindir:
+ yield os.path.join(qt_dep.bindir, b), b
+ # prefer the <tool>-qt<version> of the tool to the plain one, as we
+ # don't know what the unsuffixed one points to without calling it.
+ yield f'{b}-qt{qt_dep.qtver}', b
+ yield b, b
+
+ for b, name in gen_bins():
+ if self.tools[name].found():
+ continue
+
+ if name == 'lrelease':
+ arg = ['-version']
+ elif version_compare(qt_dep.version, '>= 5'):
+ arg = ['--version']
+ else:
+ arg = ['-v']
+
+            # Ensure that the version of Qt and the version of each tool match
+ def get_version(p: ExternalProgram) -> str:
+ _, out, err = Popen_safe(p.get_command() + arg)
+ if b.startswith('lrelease') or not qt_dep.version.startswith('4'):
+ care = out
+ else:
+ care = err
+ return care.split(' ')[-1].replace(')', '').strip()
+
+ p = state.find_program(b, required=False,
+ version_func=get_version,
+ wanted=wanted)
+ if p.found():
+ self.tools[name] = p
+
+ def _detect_tools(self, state: 'ModuleState', method: str, required: bool = True) -> None:
+ if self._tools_detected:
+ return
+ self._tools_detected = True
+ mlog.log(f'Detecting Qt{self.qt_version} tools')
+ kwargs = {'required': required, 'modules': 'Core', 'method': method}
+ # Just pick one to make mypy happy
+ qt = T.cast('QtPkgConfigDependency', find_external_dependency(f'qt{self.qt_version}', state.environment, kwargs))
+ if qt.found():
+ # Get all tools and then make sure that they are the right version
+ self.compilers_detect(state, qt)
+ if version_compare(qt.version, '>=5.14.0'):
+ self._rcc_supports_depfiles = True
+ else:
+ mlog.warning('rcc dependencies will not work properly until you move to Qt >= 5.14:',
+ mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False)
+ else:
+ suffix = f'-qt{self.qt_version}'
+ self.tools['moc'] = NonExistingExternalProgram(name='moc' + suffix)
+ self.tools['uic'] = NonExistingExternalProgram(name='uic' + suffix)
+ self.tools['rcc'] = NonExistingExternalProgram(name='rcc' + suffix)
+ self.tools['lrelease'] = NonExistingExternalProgram(name='lrelease' + suffix)
+
+ @staticmethod
+ def _qrc_nodes(state: 'ModuleState', rcc_file: 'FileOrString') -> T.Tuple[str, T.List[str]]:
+ abspath: str
+ if isinstance(rcc_file, str):
+ abspath = os.path.join(state.environment.source_dir, state.subdir, rcc_file)
+ rcc_dirname = os.path.dirname(abspath)
+ else:
+ abspath = rcc_file.absolute_path(state.environment.source_dir, state.environment.build_dir)
+ rcc_dirname = os.path.dirname(abspath)
+
+        # FIXME: what error are we actually trying to check here?
+ try:
+ tree = ET.parse(abspath)
+ root = tree.getroot()
+ result: T.List[str] = []
+ for child in root[0]:
+ if child.tag != 'file':
+ mlog.warning("malformed rcc file: ", os.path.join(state.subdir, str(rcc_file)))
+ break
+ else:
+ result.append(child.text)
+
+ return rcc_dirname, result
+ except Exception:
+ raise MesonException(f'Unable to parse resource file {abspath}')
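+
+    # For reference, a minimal .qrc file of the shape parsed above
+    # (illustrative; the file name is hypothetical):
+    #
+    #   <RCC>
+    #     <qresource prefix="/">
+    #       <file>img/logo.png</file>
+    #     </qresource>
+    #   </RCC>
+    #
+    # root[0] is the <qresource> element, and each <file> child contributes
+    # one entry to the returned list.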
+
+ def _parse_qrc_deps(self, state: 'ModuleState', rcc_file: 'FileOrString') -> T.List[File]:
+ rcc_dirname, nodes = self._qrc_nodes(state, rcc_file)
+ result: T.List[File] = []
+ for resource_path in nodes:
+ # We need to guess if the pointed resource is:
+ # a) in build directory -> implies a generated file
+ # b) in source directory
+ # c) somewhere else external dependency file to bundle
+ #
+            # Also, per the qrc documentation, relative paths are always
+            # relative to the qrc file, so they must be resolved from the
+            # qrc file's directory!
+ if os.path.isabs(resource_path):
+ # a)
+ if resource_path.startswith(os.path.abspath(state.environment.build_dir)):
+ resource_relpath = os.path.relpath(resource_path, state.environment.build_dir)
+ result.append(File(is_built=True, subdir='', fname=resource_relpath))
+ # either b) or c)
+ else:
+ result.append(File(is_built=False, subdir=state.subdir, fname=resource_path))
+ else:
+ path_from_rcc = os.path.normpath(os.path.join(rcc_dirname, resource_path))
+ # a)
+ if path_from_rcc.startswith(state.environment.build_dir):
+ result.append(File(is_built=True, subdir=state.subdir, fname=resource_path))
+ # b)
+ else:
+ result.append(File(is_built=False, subdir=state.subdir, fname=path_from_rcc))
+ return result
+
+ @FeatureNew('qt.has_tools', '0.54.0')
+ @noPosargs
+ @typed_kwargs(
+ 'qt.has_tools',
+ KwargInfo('required', (bool, coredata.UserFeatureOption), default=False),
+ KwargInfo('method', str, default='auto'),
+ )
+ def has_tools(self, state: 'ModuleState', args: T.Tuple, kwargs: 'HasToolKwArgs') -> bool:
+ method = kwargs.get('method', 'auto')
+        # We have to cast here because TypedDicts are invariant: even though
+        # ExtractRequired is a subset of HasToolKwArgs, type checkers will
+        # insist this is wrong.
+ disabled, required, feature = extract_required_kwarg(kwargs, state.subproject, default=False)
+ if disabled:
+ mlog.log('qt.has_tools skipped: feature', mlog.bold(feature), 'disabled')
+ return False
+ self._detect_tools(state, method, required=False)
+ for tool in self.tools.values():
+ if not tool.found():
+ if required:
+ raise MesonException('Qt tools not found')
+ return False
+ return True
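+
+    # A hedged usage sketch from a meson.build file:
+    #
+    #   qt5 = import('qt5')
+    #   if qt5.has_tools(required: false)
+    #     # moc, uic, rcc and lrelease were all found
+    #   endif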
+
+ @FeatureNew('qt.compile_resources', '0.59.0')
+ @noPosargs
+ @typed_kwargs(
+ 'qt.compile_resources',
+ KwargInfo('name', str),
+ KwargInfo('sources', ContainerTypeInfo(list, (File, str), allow_empty=False), listify=True, required=True),
+ KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+ KwargInfo('method', str, default='auto')
+ )
+ def compile_resources(self, state: 'ModuleState', args: T.Tuple, kwargs: 'ResourceCompilerKwArgs') -> ModuleReturnValue:
+ """Compile Qt resources files.
+
+ Uses CustomTargets to generate .cpp files from .qrc files.
+ """
+ out = self._compile_resources_impl(state, kwargs)
+ return ModuleReturnValue(out, [out])
+
+ def _compile_resources_impl(self, state: 'ModuleState', kwargs: 'ResourceCompilerKwArgs') -> T.List[build.CustomTarget]:
+ # Avoid the FeatureNew when dispatching from preprocess
+ self._detect_tools(state, kwargs['method'])
+ if not self.tools['rcc'].found():
+ err_msg = ("{0} sources specified and couldn't find {1}, "
+ "please check your qt{2} installation")
+ raise MesonException(err_msg.format('RCC', f'rcc-qt{self.qt_version}', self.qt_version))
+
+ # List of generated CustomTargets
+ targets: T.List[build.CustomTarget] = []
+
+ # depfile arguments
+ DEPFILE_ARGS: T.List[str] = ['--depfile', '@DEPFILE@'] if self._rcc_supports_depfiles else []
+
+ name = kwargs['name']
+ sources = kwargs['sources']
+ extra_args = kwargs['extra_args']
+
+        # If a name was set, generate a single .cpp file from all of the qrc
+        # files; otherwise generate one .cpp file per qrc file.
+ if name:
+ qrc_deps: T.List[File] = []
+ for s in sources:
+ qrc_deps.extend(self._parse_qrc_deps(state, s))
+
+ rcc_kwargs: T.Dict[str, T.Any] = { # TODO: if CustomTarget had typing information we could use that here...
+ 'input': sources,
+ 'output': name + '.cpp',
+ 'command': self.tools['rcc'].get_command() + ['-name', name, '-o', '@OUTPUT@'] + extra_args + ['@INPUT@'] + DEPFILE_ARGS,
+ 'depend_files': qrc_deps,
+ 'depfile': f'{name}.d',
+ }
+ res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs)
+ targets.append(res_target)
+ else:
+ for rcc_file in sources:
+ qrc_deps = self._parse_qrc_deps(state, rcc_file)
+ if isinstance(rcc_file, str):
+ basename = os.path.basename(rcc_file)
+ else:
+ basename = os.path.basename(rcc_file.fname)
+ name = f'qt{self.qt_version}-{basename.replace(".", "_")}'
+ rcc_kwargs = {
+ 'input': rcc_file,
+ 'output': f'{name}.cpp',
+ 'command': self.tools['rcc'].get_command() + ['-name', '@BASENAME@', '-o', '@OUTPUT@'] + extra_args + ['@INPUT@'] + DEPFILE_ARGS,
+ 'depend_files': qrc_deps,
+ 'depfile': f'{name}.d',
+ }
+ res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs)
+ targets.append(res_target)
+
+ return targets
+
+ @FeatureNew('qt.compile_ui', '0.59.0')
+ @noPosargs
+ @typed_kwargs(
+ 'qt.compile_ui',
+ KwargInfo('sources', ContainerTypeInfo(list, (File, str), allow_empty=False), listify=True, required=True),
+ KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+ KwargInfo('method', str, default='auto')
+ )
+ def compile_ui(self, state: 'ModuleState', args: T.Tuple, kwargs: 'UICompilerKwArgs') -> ModuleReturnValue:
+ """Compile UI resources into cpp headers."""
+ out = self._compile_ui_impl(state, kwargs)
+ return ModuleReturnValue(out, [out])
+
+ def _compile_ui_impl(self, state: 'ModuleState', kwargs: 'UICompilerKwArgs') -> build.GeneratedList:
+ # Avoid the FeatureNew when dispatching from preprocess
+ self._detect_tools(state, kwargs['method'])
+ if not self.tools['uic'].found():
+ err_msg = ("{0} sources specified and couldn't find {1}, "
+ "please check your qt{2} installation")
+ raise MesonException(err_msg.format('UIC', f'uic-qt{self.qt_version}', self.qt_version))
+
+ # TODO: This generator isn't added to the generator list in the Interpreter
+ gen = build.Generator(
+ self.tools['uic'],
+ kwargs['extra_args'] + ['-o', '@OUTPUT@', '@INPUT@'],
+ ['ui_@BASENAME@.h'],
+ name=f'Qt{self.qt_version} ui')
+ return gen.process_files(kwargs['sources'], state)
+
+ @FeatureNew('qt.compile_moc', '0.59.0')
+ @noPosargs
+ @typed_kwargs(
+ 'qt.compile_moc',
+ KwargInfo('sources', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+ KwargInfo('headers', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+ KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+ KwargInfo('method', str, default='auto'),
+ KwargInfo('include_directories', ContainerTypeInfo(list, (build.IncludeDirs, str)), listify=True, default=[]),
+ KwargInfo('dependencies', ContainerTypeInfo(list, (Dependency, ExternalLibrary)), listify=True, default=[]),
+ )
+ def compile_moc(self, state: 'ModuleState', args: T.Tuple, kwargs: 'MocCompilerKwArgs') -> ModuleReturnValue:
+ out = self._compile_moc_impl(state, kwargs)
+ return ModuleReturnValue(out, [out])
+
+ def _compile_moc_impl(self, state: 'ModuleState', kwargs: 'MocCompilerKwArgs') -> T.List[build.GeneratedList]:
+ # Avoid the FeatureNew when dispatching from preprocess
+ self._detect_tools(state, kwargs['method'])
+ if not self.tools['moc'].found():
+ err_msg = ("{0} sources specified and couldn't find {1}, "
+ "please check your qt{2} installation")
+            raise MesonException(err_msg.format('MOC', f'moc-qt{self.qt_version}', self.qt_version))
+
+ if not (kwargs['headers'] or kwargs['sources']):
+            raise build.InvalidArguments('At least one of the "headers" or "sources" keyword arguments must be provided and not empty')
+
+ inc = state.get_include_args(include_dirs=kwargs['include_directories'])
+ compile_args: T.List[str] = []
+ for dep in kwargs['dependencies']:
+ compile_args.extend([a for a in dep.get_all_compile_args() if a.startswith(('-I', '-D'))])
+
+ output: T.List[build.GeneratedList] = []
+
+ arguments = kwargs['extra_args'] + inc + compile_args + ['@INPUT@', '-o', '@OUTPUT@']
+ if kwargs['headers']:
+ moc_gen = build.Generator(
+ self.tools['moc'], arguments, ['moc_@BASENAME@.cpp'],
+ name=f'Qt{self.qt_version} moc header')
+ output.append(moc_gen.process_files(kwargs['headers'], state))
+ if kwargs['sources']:
+ moc_gen = build.Generator(
+ self.tools['moc'], arguments, ['@BASENAME@.moc'],
+ name=f'Qt{self.qt_version} moc source')
+ output.append(moc_gen.process_files(kwargs['sources'], state))
+
+ return output
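+
+    # A hedged meson.build sketch for the stand-alone compilers added in
+    # 0.59.0 ('res.qrc', 'widget.ui' and 'widget.h' are hypothetical files):
+    #
+    #   qt5 = import('qt5')
+    #   rcc_cpp = qt5.compile_resources(sources: 'res.qrc')
+    #   ui_h    = qt5.compile_ui(sources: 'widget.ui')
+    #   moc_cpp = qt5.compile_moc(headers: 'widget.h')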
+
+    # We can't use typed_pos_args here; the signature is ambiguous
+ @typed_kwargs(
+ 'qt.preprocess',
+ KwargInfo('sources', ContainerTypeInfo(list, (File, str)), listify=True, default=[], deprecated='0.59.0'),
+ KwargInfo('qresources', ContainerTypeInfo(list, (File, str)), listify=True, default=[]),
+ KwargInfo('ui_files', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+ KwargInfo('moc_sources', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+ KwargInfo('moc_headers', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+ KwargInfo('moc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.44.0'),
+ KwargInfo('rcc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.49.0'),
+ KwargInfo('uic_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.49.0'),
+ KwargInfo('method', str, default='auto'),
+ KwargInfo('include_directories', ContainerTypeInfo(list, (build.IncludeDirs, str)), listify=True, default=[]),
+ KwargInfo('dependencies', ContainerTypeInfo(list, (Dependency, ExternalLibrary)), listify=True, default=[]),
+ )
+ def preprocess(self, state: 'ModuleState', args: T.List[T.Union[str, File]], kwargs: 'PreprocessKwArgs') -> ModuleReturnValue:
+ _sources = args[1:]
+ if _sources:
+ FeatureDeprecated.single_use('qt.preprocess positional sources', '0.59', state.subproject)
+        # List is invariant, so we have to cast...
+ sources = T.cast(T.List[T.Union[str, File, build.GeneratedList, build.CustomTarget]],
+ _sources + kwargs['sources'])
+ for s in sources:
+ if not isinstance(s, (str, File)):
+ raise build.InvalidArguments('Variadic arguments to qt.preprocess must be Strings or Files')
+ method = kwargs['method']
+
+ if kwargs['qresources']:
+        # A custom output name produces a single output file; otherwise one per qresource
+ rcc_kwargs: 'ResourceCompilerKwArgs' = {'name': '', 'sources': kwargs['qresources'], 'extra_args': kwargs['rcc_extra_arguments'], 'method': method}
+ if args:
+ name = args[0]
+ if not isinstance(name, str):
+ raise build.InvalidArguments('First argument to qt.preprocess must be a string')
+ rcc_kwargs['name'] = name
+ sources.extend(self._compile_resources_impl(state, rcc_kwargs))
+
+ if kwargs['ui_files']:
+ ui_kwargs: 'UICompilerKwArgs' = {'sources': kwargs['ui_files'], 'extra_args': kwargs['uic_extra_arguments'], 'method': method}
+ sources.append(self._compile_ui_impl(state, ui_kwargs))
+
+ if kwargs['moc_headers'] or kwargs['moc_sources']:
+ moc_kwargs: 'MocCompilerKwArgs' = {
+ 'extra_args': kwargs['moc_extra_arguments'],
+ 'sources': kwargs['moc_sources'],
+ 'headers': kwargs['moc_headers'],
+ 'include_directories': kwargs['include_directories'],
+ 'dependencies': kwargs['dependencies'],
+ 'method': method,
+ }
+ sources.extend(self._compile_moc_impl(state, moc_kwargs))
+
+ return ModuleReturnValue(sources, [sources])
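+
+    # A hedged meson.build sketch combining the steps above ('qt5_dep' and
+    # the file names are hypothetical):
+    #
+    #   qt5 = import('qt5')
+    #   qt5_dep = dependency('qt5', modules: ['Core', 'Widgets'])
+    #   processed = qt5.preprocess(moc_headers: 'mainwindow.h',
+    #                              ui_files: 'mainwindow.ui',
+    #                              qresources: 'resources.qrc')
+    #   executable('myapp', 'main.cpp', processed, dependencies: qt5_dep)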
+
+ @FeatureNew('qt.compile_translations', '0.44.0')
+ @noPosargs
+ @typed_kwargs(
+ 'qt.compile_translations',
+ KwargInfo('build_by_default', bool, default=False),
+ KwargInfo('install', bool, default=False),
+ KwargInfo('install_dir', str),
+ KwargInfo('method', str, default='auto'),
+ KwargInfo('qresource', str, since='0.56.0'),
+ KwargInfo('rcc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.56.0'),
+ KwargInfo('ts_files', ContainerTypeInfo(list, (str, File)), listify=True, default=[]),
+ )
+ def compile_translations(self, state: 'ModuleState', args: T.Tuple, kwargs: 'CompileTranslationsKwArgs') -> ModuleReturnValue:
+ ts_files = kwargs['ts_files']
+ install_dir = kwargs['install_dir']
+ qresource = kwargs['qresource']
+ if qresource:
+ if ts_files:
+ raise MesonException('qt.compile_translations: Cannot specify both ts_files and qresource')
+ if os.path.dirname(qresource) != '':
+ raise MesonException('qt.compile_translations: qresource file name must not contain a subdirectory.')
+ qresource_file = File.from_built_file(state.subdir, qresource)
+ infile_abs = os.path.join(state.environment.source_dir, qresource_file.relative_name())
+ outfile_abs = os.path.join(state.environment.build_dir, qresource_file.relative_name())
+ os.makedirs(os.path.dirname(outfile_abs), exist_ok=True)
+ shutil.copy2(infile_abs, outfile_abs)
+ self.interpreter.add_build_def_file(infile_abs)
+
+ _, nodes = self._qrc_nodes(state, qresource_file)
+ for c in nodes:
+ if c.endswith('.qm'):
+                    # str.rstrip() strips a character set, not a suffix, so
+                    # slice off the '.qm' extension instead.
+                    ts_files.append(c[:-len('.qm')] + '.ts')
+ else:
+ raise MesonException(f'qt.compile_translations: qresource can only contain qm files, found {c}')
+ results = self.preprocess(state, [], {'qresources': qresource_file, 'rcc_extra_arguments': kwargs['rcc_extra_arguments']})
+ self._detect_tools(state, kwargs['method'])
+ translations: T.List[build.CustomTarget] = []
+ for ts in ts_files:
+ if not self.tools['lrelease'].found():
+ raise MesonException('qt.compile_translations: ' +
+ self.tools['lrelease'].name + ' not found')
+ if qresource:
+ outdir = os.path.dirname(os.path.normpath(os.path.join(state.subdir, ts)))
+ ts = os.path.basename(ts)
+ else:
+ outdir = state.subdir
+ cmd = [self.tools['lrelease'], '@INPUT@', '-qm', '@OUTPUT@']
+ lrelease_kwargs = {'output': '@BASENAME@.qm',
+ 'input': ts,
+ 'install': kwargs.get('install', False),
+ 'build_by_default': kwargs.get('build_by_default', False),
+ 'command': cmd}
+ if install_dir is not None:
+ lrelease_kwargs['install_dir'] = install_dir
+ lrelease_target = build.CustomTarget(f'qt{self.qt_version}-compile-{ts}', outdir, state.subproject, lrelease_kwargs)
+ translations.append(lrelease_target)
+ if qresource:
+ return ModuleReturnValue(results.return_value[0], [results.new_objects, translations])
+ else:
+ return ModuleReturnValue(translations, [translations])
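+
+    # A hedged meson.build sketch ('myapp_fr.ts' is a hypothetical file):
+    #
+    #   qt5 = import('qt5')
+    #   translations = qt5.compile_translations(ts_files: 'myapp_fr.ts',
+    #                                           build_by_default: true)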
diff --git a/meson/mesonbuild/modules/qt4.py b/meson/mesonbuild/modules/qt4.py
new file mode 100644
index 000000000..e85a1506f
--- /dev/null
+++ b/meson/mesonbuild/modules/qt4.py
@@ -0,0 +1,25 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .qt import QtBaseModule
+
+
+class Qt4Module(QtBaseModule):
+
+ def __init__(self, interpreter):
+ QtBaseModule.__init__(self, interpreter, qt_version=4)
+
+
+def initialize(*args, **kwargs):
+ return Qt4Module(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/qt5.py b/meson/mesonbuild/modules/qt5.py
new file mode 100644
index 000000000..873c2dbeb
--- /dev/null
+++ b/meson/mesonbuild/modules/qt5.py
@@ -0,0 +1,25 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .qt import QtBaseModule
+
+
+class Qt5Module(QtBaseModule):
+
+ def __init__(self, interpreter):
+ QtBaseModule.__init__(self, interpreter, qt_version=5)
+
+
+def initialize(*args, **kwargs):
+ return Qt5Module(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/qt6.py b/meson/mesonbuild/modules/qt6.py
new file mode 100644
index 000000000..d9cd6519d
--- /dev/null
+++ b/meson/mesonbuild/modules/qt6.py
@@ -0,0 +1,25 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .qt import QtBaseModule
+
+
+class Qt6Module(QtBaseModule):
+
+ def __init__(self, interpreter):
+ QtBaseModule.__init__(self, interpreter, qt_version=6)
+
+
+def initialize(*args, **kwargs):
+ return Qt6Module(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/rpm.py b/meson/mesonbuild/modules/rpm.py
new file mode 100644
index 000000000..1fae14444
--- /dev/null
+++ b/meson/mesonbuild/modules/rpm.py
@@ -0,0 +1,186 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''This module provides helper functions for RPM-related
+functionality, such as generating a template RPM spec file.'''
+
+from .. import build
+from .. import compilers
+import datetime
+from .. import mlog
+from . import GirTarget, TypelibTarget
+from . import ExtensionModule
+from ..interpreterbase import noKwargs
+
+import os
+
+class RPMModule(ExtensionModule):
+ def __init__(self, interpreter):
+ super().__init__(interpreter)
+ self.methods.update({
+ 'generate_spec_template': self.generate_spec_template,
+ })
+
+ @noKwargs
+ def generate_spec_template(self, state, args, kwargs):
+ required_compilers = self.__get_required_compilers(state)
+ proj = state.project_name.replace(' ', '_').replace('\t', '_')
+ so_installed = False
+ devel_subpkg = False
+ files = set()
+ files_devel = set()
+ to_delete = set()
+ for target in state.targets.values():
+ if isinstance(target, build.Executable) and target.need_install:
+ files.add('%%{_bindir}/%s' % target.get_filename())
+ elif isinstance(target, build.SharedLibrary) and target.need_install:
+ files.add('%%{_libdir}/%s' % target.get_filename())
+ for alias in target.get_aliases():
+ if alias.endswith('.so'):
+ files_devel.add('%%{_libdir}/%s' % alias)
+ else:
+ files.add('%%{_libdir}/%s' % alias)
+ so_installed = True
+ elif isinstance(target, build.StaticLibrary) and target.need_install:
+ to_delete.add('%%{buildroot}%%{_libdir}/%s' % target.get_filename())
+                mlog.warning('removing', mlog.bold(target.get_filename()),
+                             'from package because packaging static libs is not recommended')
+ elif isinstance(target, GirTarget) and target.should_install():
+ files_devel.add('%%{_datadir}/gir-1.0/%s' % target.get_filename()[0])
+ elif isinstance(target, TypelibTarget) and target.should_install():
+ files.add('%%{_libdir}/girepository-1.0/%s' % target.get_filename()[0])
+ for header in state.headers:
+ if header.get_install_subdir():
+ files_devel.add('%%{_includedir}/%s/' % header.get_install_subdir())
+ else:
+ for hdr_src in header.get_sources():
+ files_devel.add('%%{_includedir}/%s' % hdr_src)
+ for man in state.man:
+ for man_file in man.get_sources():
+ if man.locale:
+ files.add('%%{_mandir}/%s/man%u/%s.*' % (man.locale, int(man_file.split('.')[-1]), man_file))
+ else:
+ files.add('%%{_mandir}/man%u/%s.*' % (int(man_file.split('.')[-1]), man_file))
+ if files_devel:
+ devel_subpkg = True
+
+ filename = os.path.join(state.environment.get_build_dir(),
+ '%s.spec' % proj)
+ with open(filename, 'w+', encoding='utf-8') as fn:
+ fn.write('Name: %s\n' % proj)
+ fn.write('Version: # FIXME\n')
+ fn.write('Release: 1%{?dist}\n')
+ fn.write('Summary: # FIXME\n')
+ fn.write('License: # FIXME\n')
+ fn.write('\n')
+ fn.write('Source0: %{name}-%{version}.tar.xz # FIXME\n')
+ fn.write('\n')
+ fn.write('BuildRequires: meson\n')
+ for compiler in required_compilers:
+ fn.write('BuildRequires: %s\n' % compiler)
+ for dep in state.environment.coredata.deps.host:
+ fn.write('BuildRequires: pkgconfig(%s)\n' % dep[0])
+# ext_libs and ext_progs have been removed from coredata, so the following
+# code no longer works. It is kept as a reminder of the idea, should anyone
+# wish to re-implement it.
+#
+# for lib in state.environment.coredata.ext_libs.values():
+# name = lib.get_name()
+# fn.write('BuildRequires: {} # FIXME\n'.format(name))
+# mlog.warning('replace', mlog.bold(name), 'with the real package.',
+# 'You can use following command to find package which '
+# 'contains this lib:',
+# mlog.bold("dnf provides '*/lib{}.so'".format(name)))
+# for prog in state.environment.coredata.ext_progs.values():
+# if not prog.found():
+# fn.write('BuildRequires: %%{_bindir}/%s # FIXME\n' %
+# prog.get_name())
+# else:
+# fn.write('BuildRequires: {}\n'.format(prog.get_path()))
+ fn.write('\n')
+ fn.write('%description\n')
+ fn.write('\n')
+ if devel_subpkg:
+ fn.write('%package devel\n')
+ fn.write('Summary: Development files for %{name}\n')
+            fn.write('Requires: %{name}%{?_isa} = %{?epoch:%{epoch}:}%{version}-%{release}\n')
+ fn.write('\n')
+ fn.write('%description devel\n')
+ fn.write('Development files for %{name}.\n')
+ fn.write('\n')
+ fn.write('%prep\n')
+ fn.write('%autosetup\n')
+ fn.write('\n')
+ fn.write('%build\n')
+ fn.write('%meson\n')
+ fn.write('%meson_build\n')
+ fn.write('\n')
+ fn.write('%install\n')
+ fn.write('%meson_install\n')
+ if to_delete:
+ fn.write('rm -vf %s\n' % ' '.join(to_delete))
+ fn.write('\n')
+ fn.write('%check\n')
+ fn.write('%meson_test\n')
+ fn.write('\n')
+ fn.write('%files\n')
+ for f in files:
+ fn.write('%s\n' % f)
+ fn.write('\n')
+ if devel_subpkg:
+ fn.write('%files devel\n')
+ for f in files_devel:
+ fn.write('%s\n' % f)
+ fn.write('\n')
+ if so_installed:
+ fn.write('%post -p /sbin/ldconfig\n')
+ fn.write('%postun -p /sbin/ldconfig\n')
+ fn.write('\n')
+ fn.write('%changelog\n')
+ fn.write('* %s meson <meson@example.com> - \n' %
+ datetime.date.today().strftime('%a %b %d %Y'))
+ fn.write('- \n')
+ fn.write('\n')
+ mlog.log('RPM spec template written to %s.spec.\n' % proj)
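+
+    # A hedged meson.build sketch; the template is written into the build
+    # directory and is meant to be filled in by hand afterwards:
+    #
+    #   rpm = import('rpm')
+    #   rpm.generate_spec_template()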
+
+ def __get_required_compilers(self, state):
+ required_compilers = set()
+ for compiler in state.environment.coredata.compilers.host.values():
+ # Elbrus has one 'lcc' package for every compiler
+ if isinstance(compiler, compilers.GnuCCompiler):
+ required_compilers.add('gcc')
+ elif isinstance(compiler, compilers.GnuCPPCompiler):
+ required_compilers.add('gcc-c++')
+ elif isinstance(compiler, compilers.ElbrusCCompiler):
+ required_compilers.add('lcc')
+ elif isinstance(compiler, compilers.ElbrusCPPCompiler):
+ required_compilers.add('lcc')
+ elif isinstance(compiler, compilers.ElbrusFortranCompiler):
+ required_compilers.add('lcc')
+ elif isinstance(compiler, compilers.ValaCompiler):
+ required_compilers.add('vala')
+ elif isinstance(compiler, compilers.GnuFortranCompiler):
+ required_compilers.add('gcc-gfortran')
+ elif isinstance(compiler, compilers.GnuObjCCompiler):
+ required_compilers.add('gcc-objc')
+            elif isinstance(compiler, compilers.GnuObjCPPCompiler):
+ required_compilers.add('gcc-objc++')
+ else:
+ mlog.log('RPM spec file not created, generation not allowed for:',
+ mlog.bold(compiler.get_id()))
+ return required_compilers
+
+
+def initialize(*args, **kwargs):
+ return RPMModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/sourceset.py b/meson/mesonbuild/modules/sourceset.py
new file mode 100644
index 000000000..ba8b30032
--- /dev/null
+++ b/meson/mesonbuild/modules/sourceset.py
@@ -0,0 +1,198 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import namedtuple
+from .. import mesonlib
+from .. import build
+from ..mesonlib import listify, OrderedSet
+from . import ExtensionModule, ModuleObject, MutableModuleObject
+from ..interpreterbase import (
+ noPosargs, noKwargs, permittedKwargs,
+ InterpreterException, InvalidArguments, InvalidCode, FeatureNew,
+)
+
+SourceSetRule = namedtuple('SourceSetRule', 'keys sources if_false sourcesets dependencies extra_deps')
+SourceFiles = namedtuple('SourceFiles', 'sources dependencies')
+
+class SourceSet(MutableModuleObject):
+ def __init__(self, interpreter):
+ super().__init__()
+ self.rules = []
+ self.subproject = interpreter.subproject
+ self.environment = interpreter.environment
+ self.subdir = interpreter.subdir
+ self.frozen = False
+ self.methods.update({
+ 'add': self.add_method,
+ 'add_all': self.add_all_method,
+ 'all_sources': self.all_sources_method,
+ 'all_dependencies': self.all_dependencies_method,
+ 'apply': self.apply_method,
+ })
+
+ def check_source_files(self, arg, allow_deps):
+ sources = []
+ deps = []
+ for x in arg:
+ if isinstance(x, (str, mesonlib.File,
+ build.GeneratedList, build.CustomTarget,
+ build.CustomTargetIndex)):
+ sources.append(x)
+ elif hasattr(x, 'found'):
+ if not allow_deps:
+ msg = 'Dependencies are not allowed in the if_false argument.'
+ raise InvalidArguments(msg)
+ deps.append(x)
+ else:
+ msg = 'Sources must be strings or file-like objects.'
+ raise InvalidArguments(msg)
+ mesonlib.check_direntry_issues(sources)
+ return sources, deps
+
+ def check_conditions(self, arg):
+ keys = []
+ deps = []
+ for x in listify(arg):
+ if isinstance(x, str):
+ keys.append(x)
+ elif hasattr(x, 'found'):
+ deps.append(x)
+ else:
+                raise InvalidArguments('Conditions must be strings or dependency objects')
+ return keys, deps
+
+ @permittedKwargs(['when', 'if_false', 'if_true'])
+ def add_method(self, state, args, kwargs):
+ if self.frozen:
+ raise InvalidCode('Tried to use \'add\' after querying the source set')
+ when = listify(kwargs.get('when', []))
+ if_true = listify(kwargs.get('if_true', []))
+ if_false = listify(kwargs.get('if_false', []))
+ if not when and not if_true and not if_false:
+ if_true = args
+ elif args:
+ raise InterpreterException('add called with both positional and keyword arguments')
+ keys, dependencies = self.check_conditions(when)
+ sources, extra_deps = self.check_source_files(if_true, True)
+ if_false, _ = self.check_source_files(if_false, False)
+ self.rules.append(SourceSetRule(keys, sources, if_false, [], dependencies, extra_deps))
+
+ @permittedKwargs(['when', 'if_true'])
+ def add_all_method(self, state, args, kwargs):
+ if self.frozen:
+ raise InvalidCode('Tried to use \'add_all\' after querying the source set')
+ when = listify(kwargs.get('when', []))
+ if_true = listify(kwargs.get('if_true', []))
+ if not when and not if_true:
+ if_true = args
+ elif args:
+ raise InterpreterException('add_all called with both positional and keyword arguments')
+ keys, dependencies = self.check_conditions(when)
+ for s in if_true:
+ if not isinstance(s, SourceSet):
+ raise InvalidCode('Arguments to \'add_all\' after the first must be source sets')
+ s.frozen = True
+ self.rules.append(SourceSetRule(keys, [], [], if_true, dependencies, []))
+
+ def collect(self, enabled_fn, all_sources, into=None):
+ if not into:
+ into = SourceFiles(OrderedSet(), OrderedSet())
+ for entry in self.rules:
+ if all(x.found() for x in entry.dependencies) and \
+ all(enabled_fn(key) for key in entry.keys):
+ into.sources.update(entry.sources)
+ into.dependencies.update(entry.dependencies)
+ into.dependencies.update(entry.extra_deps)
+ for ss in entry.sourcesets:
+ ss.collect(enabled_fn, all_sources, into)
+ if not all_sources:
+ continue
+ into.sources.update(entry.if_false)
+ return into
+
+ @noKwargs
+ @noPosargs
+ def all_sources_method(self, state, args, kwargs):
+ self.frozen = True
+ files = self.collect(lambda x: True, True)
+ return list(files.sources)
+
+ @noKwargs
+ @noPosargs
+ @FeatureNew('source_set.all_dependencies() method', '0.52.0')
+ def all_dependencies_method(self, state, args, kwargs):
+ self.frozen = True
+ files = self.collect(lambda x: True, True)
+ return list(files.dependencies)
+
+ @permittedKwargs(['strict'])
+ def apply_method(self, state, args, kwargs):
+ if len(args) != 1:
+ raise InterpreterException('Apply takes exactly one argument')
+ config_data = args[0]
+ self.frozen = True
+ strict = kwargs.get('strict', True)
+ if isinstance(config_data, dict):
+ def _get_from_config_data(key):
+ if strict and key not in config_data:
+ raise InterpreterException(f'Entry {key} not in configuration dictionary.')
+ return config_data.get(key, False)
+ else:
+ config_cache = dict()
+
+ def _get_from_config_data(key):
+ nonlocal config_cache
+ if key not in config_cache:
+ args = [key] if strict else [key, False]
+ config_cache[key] = config_data.get_method(args, {})
+ return config_cache[key]
+
+ files = self.collect(_get_from_config_data, False)
+ res = SourceFilesObject(files)
+ return res
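+
+    # A hedged meson.build sketch ('foo.c' and the 'CONFIG_FOO' key are
+    # hypothetical):
+    #
+    #   ss = import('sourceset').source_set()
+    #   ss.add(when: 'CONFIG_FOO', if_true: files('foo.c'))
+    #   conf = configuration_data()
+    #   conf.set('CONFIG_FOO', true)
+    #   result = ss.apply(conf)
+    #   executable('prog', result.sources(), dependencies: result.dependencies())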
+
+class SourceFilesObject(ModuleObject):
+ def __init__(self, files):
+ super().__init__()
+ self.files = files
+ self.methods.update({
+ 'sources': self.sources_method,
+ 'dependencies': self.dependencies_method,
+ })
+
+ @noPosargs
+ @noKwargs
+ def sources_method(self, state, args, kwargs):
+ return list(self.files.sources)
+
+ @noPosargs
+ @noKwargs
+ def dependencies_method(self, state, args, kwargs):
+ return list(self.files.dependencies)
+
+class SourceSetModule(ExtensionModule):
+ @FeatureNew('SourceSet module', '0.51.0')
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.methods.update({
+ 'source_set': self.source_set,
+ })
+
+ @noKwargs
+ @noPosargs
+ def source_set(self, state, args, kwargs):
+ return SourceSet(self.interpreter)
+
+def initialize(*args, **kwargs):
+ return SourceSetModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/unstable_cuda.py b/meson/mesonbuild/modules/unstable_cuda.py
new file mode 100644
index 000000000..d542fdd54
--- /dev/null
+++ b/meson/mesonbuild/modules/unstable_cuda.py
@@ -0,0 +1,350 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing as T
+import re
+
+from ..mesonlib import version_compare
+from ..compilers import CudaCompiler, Compiler
+
+from . import NewExtensionModule
+
+from ..interpreterbase import (
+ flatten, permittedKwargs, noKwargs,
+ InvalidArguments, FeatureNew
+)
+
+if T.TYPE_CHECKING:
+    from . import ModuleState
+
+class CudaModule(NewExtensionModule):
+
+ @FeatureNew('CUDA module', '0.50.0')
+ def __init__(self, *args, **kwargs):
+ super().__init__()
+ self.methods.update({
+ "min_driver_version": self.min_driver_version,
+ "nvcc_arch_flags": self.nvcc_arch_flags,
+ "nvcc_arch_readable": self.nvcc_arch_readable,
+ })
+
+ @noKwargs
+ def min_driver_version(self, state: 'ModuleState',
+ args: T.Tuple[str],
+ kwargs: T.Dict[str, T.Any]) -> str:
+ argerror = InvalidArguments('min_driver_version must have exactly one positional argument: ' +
+ 'a CUDA Toolkit version string. Beware that, since CUDA 11.0, ' +
+ 'the CUDA Toolkit\'s components (including NVCC) are versioned ' +
+ 'independently from each other (and the CUDA Toolkit as a whole).')
+
+ if len(args) != 1 or not isinstance(args[0], str):
+ raise argerror
+
+ cuda_version = args[0]
+ driver_version_table = [
+ {'cuda_version': '>=11.5.0', 'windows': '496.04', 'linux': '495.29.05'},
+ {'cuda_version': '>=11.4.1', 'windows': '471.41', 'linux': '470.57.02'},
+ {'cuda_version': '>=11.4.0', 'windows': '471.11', 'linux': '470.42.01'},
+ {'cuda_version': '>=11.3.0', 'windows': '465.89', 'linux': '465.19.01'},
+ {'cuda_version': '>=11.2.2', 'windows': '461.33', 'linux': '460.32.03'},
+ {'cuda_version': '>=11.2.1', 'windows': '461.09', 'linux': '460.32.03'},
+ {'cuda_version': '>=11.2.0', 'windows': '460.82', 'linux': '460.27.03'},
+ {'cuda_version': '>=11.1.1', 'windows': '456.81', 'linux': '455.32'},
+ {'cuda_version': '>=11.1.0', 'windows': '456.38', 'linux': '455.23'},
+ {'cuda_version': '>=11.0.3', 'windows': '451.82', 'linux': '450.51.06'},
+ {'cuda_version': '>=11.0.2', 'windows': '451.48', 'linux': '450.51.05'},
+ {'cuda_version': '>=11.0.1', 'windows': '451.22', 'linux': '450.36.06'},
+ {'cuda_version': '>=10.2.89', 'windows': '441.22', 'linux': '440.33'},
+ {'cuda_version': '>=10.1.105', 'windows': '418.96', 'linux': '418.39'},
+ {'cuda_version': '>=10.0.130', 'windows': '411.31', 'linux': '410.48'},
+ {'cuda_version': '>=9.2.148', 'windows': '398.26', 'linux': '396.37'},
+ {'cuda_version': '>=9.2.88', 'windows': '397.44', 'linux': '396.26'},
+ {'cuda_version': '>=9.1.85', 'windows': '391.29', 'linux': '390.46'},
+ {'cuda_version': '>=9.0.76', 'windows': '385.54', 'linux': '384.81'},
+ {'cuda_version': '>=8.0.61', 'windows': '376.51', 'linux': '375.26'},
+ {'cuda_version': '>=8.0.44', 'windows': '369.30', 'linux': '367.48'},
+ {'cuda_version': '>=7.5.16', 'windows': '353.66', 'linux': '352.31'},
+ {'cuda_version': '>=7.0.28', 'windows': '347.62', 'linux': '346.46'},
+ ]
+
+ driver_version = 'unknown'
+ for d in driver_version_table:
+ if version_compare(cuda_version, d['cuda_version']):
+ driver_version = d.get(state.host_machine.system, d['linux'])
+ break
+
+ return driver_version
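+
+    # A hedged meson.build sketch:
+    #
+    #   cuda = import('unstable-cuda')
+    #   nvcc = meson.get_compiler('cuda')
+    #   driver_version = cuda.min_driver_version(nvcc.version())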
+
+ @permittedKwargs(['detected'])
+ def nvcc_arch_flags(self, state: 'ModuleState',
+ args: T.Tuple[T.Union[Compiler, CudaCompiler, str]],
+ kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+ nvcc_arch_args = self._validate_nvcc_arch_args(args, kwargs)
+ ret = self._nvcc_arch_flags(*nvcc_arch_args)[0]
+ return ret
+
+ @permittedKwargs(['detected'])
+ def nvcc_arch_readable(self, state: 'ModuleState',
+ args: T.Tuple[T.Union[Compiler, CudaCompiler, str]],
+ kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+ nvcc_arch_args = self._validate_nvcc_arch_args(args, kwargs)
+ ret = self._nvcc_arch_flags(*nvcc_arch_args)[1]
+ return ret
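+
+    # A hedged meson.build sketch ('Auto' limits the list to the detected
+    # GPUs; a '+PTX' suffix additionally embeds PTX for forward
+    # compatibility):
+    #
+    #   cuda = import('unstable-cuda')
+    #   nvcc = meson.get_compiler('cuda')
+    #   arch_flags    = cuda.nvcc_arch_flags(nvcc, 'Auto', detected: ['6.1'])
+    #   arch_readable = cuda.nvcc_arch_readable(nvcc, '8.6+PTX')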
+
+ @staticmethod
+ def _break_arch_string(s):
+ s = re.sub('[ \t\r\n,;]+', ';', s)
+ s = s.strip(';').split(';')
+ return s
+
+ @staticmethod
+ def _detected_cc_from_compiler(c):
+ if isinstance(c, CudaCompiler):
+ return c.detected_cc
+ return ''
+
+ @staticmethod
+ def _version_from_compiler(c):
+ if isinstance(c, CudaCompiler):
+ return c.version
+ if isinstance(c, str):
+ return c
+ return 'unknown'
+
+ def _validate_nvcc_arch_args(self, args, kwargs):
+ argerror = InvalidArguments('The first argument must be an NVCC compiler object, or its version string!')
+
+ if len(args) < 1:
+ raise argerror
+ else:
+ compiler = args[0]
+ cuda_version = self._version_from_compiler(compiler)
+ if cuda_version == 'unknown':
+ raise argerror
+
+ arch_list = [] if len(args) <= 1 else flatten(args[1:])
+ arch_list = [self._break_arch_string(a) for a in arch_list]
+ arch_list = flatten(arch_list)
+ if len(arch_list) > 1 and not set(arch_list).isdisjoint({'All', 'Common', 'Auto'}):
+ raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')
+ arch_list = arch_list[0] if len(arch_list) == 1 else arch_list
+
+ detected = kwargs.get('detected', self._detected_cc_from_compiler(compiler))
+ detected = flatten([detected])
+ detected = [self._break_arch_string(a) for a in detected]
+ detected = flatten(detected)
+ if not set(detected).isdisjoint({'All', 'Common', 'Auto'}):
+ raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')
+
+ return cuda_version, arch_list, detected
+
+ def _filter_cuda_arch_list(self, cuda_arch_list, lo=None, hi=None, saturate=None):
+ """
+ Filter CUDA arch list (no codenames) for >= low and < hi architecture
+ bounds, and deduplicate.
+ If saturate is provided, architectures >= hi are replaced with saturate.
+ """
+
+ filtered_cuda_arch_list = []
+ for arch in cuda_arch_list:
+ if arch:
+ if lo and version_compare(arch, '<' + lo):
+ continue
+ if hi and version_compare(arch, '>=' + hi):
+ if not saturate:
+ continue
+ arch = saturate
+ if arch not in filtered_cuda_arch_list:
+ filtered_cuda_arch_list.append(arch)
+ return filtered_cuda_arch_list
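+
+    # Worked example: _filter_cuda_arch_list(['3.0', '7.5', '8.6'],
+    # lo='3.5', hi='8.0', saturate='8.0') returns ['7.5', '8.0']: 3.0 is
+    # dropped for being below lo, and 8.6 is saturated down to 8.0.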
+
+ def _nvcc_arch_flags(self, cuda_version, cuda_arch_list='Auto', detected=''):
+ """
+ Using the CUDA Toolkit version and the target architectures, compute
+ the NVCC architecture flags.
+ """
+
+ # Replicates much of the logic of
+ # https://github.com/Kitware/CMake/blob/master/Modules/FindCUDA/select_compute_arch.cmake
+ # except that a bug with cuda_arch_list="All" is worked around by
+ # tracking both lower and upper limits on GPU architectures.
+
+ cuda_known_gpu_architectures = ['Fermi', 'Kepler', 'Maxwell'] # noqa: E221
+ cuda_common_gpu_architectures = ['3.0', '3.5', '5.0'] # noqa: E221
+ cuda_hi_limit_gpu_architecture = None # noqa: E221
+ cuda_lo_limit_gpu_architecture = '2.0' # noqa: E221
+ cuda_all_gpu_architectures = ['3.0', '3.2', '3.5', '5.0'] # noqa: E221
+
+ if version_compare(cuda_version, '<7.0'):
+ cuda_hi_limit_gpu_architecture = '5.2'
+
+ if version_compare(cuda_version, '>=7.0'):
+ cuda_known_gpu_architectures += ['Kepler+Tegra', 'Kepler+Tesla', 'Maxwell+Tegra'] # noqa: E221
+ cuda_common_gpu_architectures += ['5.2'] # noqa: E221
+
+ if version_compare(cuda_version, '<8.0'):
+ cuda_common_gpu_architectures += ['5.2+PTX'] # noqa: E221
+ cuda_hi_limit_gpu_architecture = '6.0' # noqa: E221
+
+ if version_compare(cuda_version, '>=8.0'):
+ cuda_known_gpu_architectures += ['Pascal', 'Pascal+Tegra'] # noqa: E221
+ cuda_common_gpu_architectures += ['6.0', '6.1'] # noqa: E221
+ cuda_all_gpu_architectures += ['6.0', '6.1', '6.2'] # noqa: E221
+
+ if version_compare(cuda_version, '<9.0'):
+ cuda_common_gpu_architectures += ['6.1+PTX'] # noqa: E221
+ cuda_hi_limit_gpu_architecture = '7.0' # noqa: E221
+
+ if version_compare(cuda_version, '>=9.0'):
+ cuda_known_gpu_architectures += ['Volta', 'Xavier'] # noqa: E221
+ cuda_common_gpu_architectures += ['7.0'] # noqa: E221
+ cuda_all_gpu_architectures += ['7.0', '7.2'] # noqa: E221
+ # https://docs.nvidia.com/cuda/archive/9.0/cuda-toolkit-release-notes/index.html#unsupported-features
+ cuda_lo_limit_gpu_architecture = '3.0' # noqa: E221
+
+ if version_compare(cuda_version, '<10.0'):
+ cuda_common_gpu_architectures += ['7.2+PTX'] # noqa: E221
+ cuda_hi_limit_gpu_architecture = '8.0' # noqa: E221
+
+ if version_compare(cuda_version, '>=10.0'):
+ cuda_known_gpu_architectures += ['Turing'] # noqa: E221
+ cuda_common_gpu_architectures += ['7.5'] # noqa: E221
+ cuda_all_gpu_architectures += ['7.5'] # noqa: E221
+
+ if version_compare(cuda_version, '<11.0'):
+ cuda_common_gpu_architectures += ['7.5+PTX'] # noqa: E221
+ cuda_hi_limit_gpu_architecture = '8.0' # noqa: E221
+
+ if version_compare(cuda_version, '>=11.0'):
+ cuda_known_gpu_architectures += ['Ampere'] # noqa: E221
+ cuda_common_gpu_architectures += ['8.0'] # noqa: E221
+ cuda_all_gpu_architectures += ['8.0'] # noqa: E221
+ # https://docs.nvidia.com/cuda/archive/11.0/cuda-toolkit-release-notes/index.html#deprecated-features
+ cuda_lo_limit_gpu_architecture = '3.5' # noqa: E221
+
+ if version_compare(cuda_version, '<11.1'):
+ cuda_common_gpu_architectures += ['8.0+PTX'] # noqa: E221
+ cuda_hi_limit_gpu_architecture = '8.6' # noqa: E221
+
+ if version_compare(cuda_version, '>=11.1'):
+ cuda_common_gpu_architectures += ['8.6', '8.6+PTX'] # noqa: E221
+ cuda_all_gpu_architectures += ['8.6'] # noqa: E221
+
+ if version_compare(cuda_version, '<12.0'):
+ cuda_hi_limit_gpu_architecture = '9.0' # noqa: E221
+
+ if not cuda_arch_list:
+ cuda_arch_list = 'Auto'
+
+ if cuda_arch_list == 'All': # noqa: E271
+ cuda_arch_list = cuda_known_gpu_architectures
+ elif cuda_arch_list == 'Common': # noqa: E271
+ cuda_arch_list = cuda_common_gpu_architectures
+ elif cuda_arch_list == 'Auto': # noqa: E271
+ if detected:
+ if isinstance(detected, list):
+ cuda_arch_list = detected
+ else:
+ cuda_arch_list = self._break_arch_string(detected)
+ cuda_arch_list = self._filter_cuda_arch_list(cuda_arch_list,
+ cuda_lo_limit_gpu_architecture,
+ cuda_hi_limit_gpu_architecture,
+ cuda_common_gpu_architectures[-1])
+ else:
+ cuda_arch_list = cuda_common_gpu_architectures
+ elif isinstance(cuda_arch_list, str):
+ cuda_arch_list = self._break_arch_string(cuda_arch_list)
+
+ cuda_arch_list = sorted([x for x in set(cuda_arch_list) if x])
+
+ cuda_arch_bin = []
+ cuda_arch_ptx = []
+ for arch_name in cuda_arch_list:
+ arch_bin = []
+ arch_ptx = []
+ add_ptx = arch_name.endswith('+PTX')
+ if add_ptx:
+ arch_name = arch_name[:-len('+PTX')]
+
+ if re.fullmatch('[0-9]+\\.[0-9](\\([0-9]+\\.[0-9]\\))?', arch_name):
+ arch_bin, arch_ptx = [arch_name], [arch_name]
+ else:
+ arch_bin, arch_ptx = {
+ 'Fermi': (['2.0', '2.1(2.0)'], []),
+ 'Kepler+Tegra': (['3.2'], []),
+ 'Kepler+Tesla': (['3.7'], []),
+ 'Kepler': (['3.0', '3.5'], ['3.5']),
+ 'Maxwell+Tegra': (['5.3'], []),
+ 'Maxwell': (['5.0', '5.2'], ['5.2']),
+ 'Pascal': (['6.0', '6.1'], ['6.1']),
+ 'Pascal+Tegra': (['6.2'], []),
+ 'Volta': (['7.0'], ['7.0']),
+ 'Xavier': (['7.2'], []),
+ 'Turing': (['7.5'], ['7.5']),
+ 'Ampere': (['8.0'], ['8.0']),
+ }.get(arch_name, (None, None))
+
+ if arch_bin is None:
+ raise InvalidArguments('Unknown CUDA Architecture Name {}!'
+ .format(arch_name))
+
+ cuda_arch_bin += arch_bin
+
+ if add_ptx:
+ if not arch_ptx:
+ arch_ptx = arch_bin
+ cuda_arch_ptx += arch_ptx
+
+ cuda_arch_bin = sorted(list(set(cuda_arch_bin)))
+ cuda_arch_ptx = sorted(list(set(cuda_arch_ptx)))
+
+ nvcc_flags = []
+ nvcc_archs_readable = []
+
+ for arch in cuda_arch_bin:
+ arch, codev = re.fullmatch(
+ '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups()
+
+ if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture):
+ continue
+ if version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
+ continue
+
+ if codev:
+ arch = arch.replace('.', '')
+ codev = codev.replace('.', '')
+ nvcc_flags += ['-gencode', 'arch=compute_' + codev + ',code=sm_' + arch]
+ nvcc_archs_readable += ['sm_' + arch]
+ else:
+ arch = arch.replace('.', '')
+ nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=sm_' + arch]
+ nvcc_archs_readable += ['sm_' + arch]
+
+ for arch in cuda_arch_ptx:
+ arch, codev = re.fullmatch(
+ '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups()
+
+ if codev:
+ arch = codev
+
+ if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture):
+ continue
+ if version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
+ continue
+
+ arch = arch.replace('.', '')
+ nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=compute_' + arch]
+ nvcc_archs_readable += ['compute_' + arch]
+
+ return nvcc_flags, nvcc_archs_readable
+
+def initialize(*args, **kwargs):
+ return CudaModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/unstable_external_project.py b/meson/mesonbuild/modules/unstable_external_project.py
new file mode 100644
index 000000000..573622696
--- /dev/null
+++ b/meson/mesonbuild/modules/unstable_external_project.py
@@ -0,0 +1,268 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os, subprocess, shlex
+from pathlib import Path
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue, ModuleState, NewExtensionModule
+from .. import mlog, build
+from ..mesonlib import (EnvironmentException, MesonException, Popen_safe,
+                        MachineChoice, get_variable_regex, do_replacement,
+                        extract_as_list)
+from ..interpreterbase import InterpreterException, FeatureNew
+from ..interpreterbase import permittedKwargs, typed_pos_args
+from ..compilers.compilers import CFLAGS_MAPPING, CEXE_MAPPING
+from ..dependencies import InternalDependency, PkgConfigDependency
+from ..mesonlib import OptionKey
+
+class ExternalProject(NewExtensionModule):
+ def __init__(self,
+ state: ModuleState,
+ configure_command: str,
+ configure_options: T.List[str],
+ cross_configure_options: T.List[str],
+ env: build.EnvironmentVariables,
+ verbose: bool):
+ super().__init__()
+ self.methods.update({'dependency': self.dependency_method,
+ })
+
+ self.subdir = Path(state.subdir)
+ self.project_version = state.project_version
+ self.subproject = state.subproject
+ self.env = state.environment
+ self.build_machine = state.build_machine
+ self.host_machine = state.host_machine
+ self.configure_command = configure_command
+ self.configure_options = configure_options
+ self.cross_configure_options = cross_configure_options
+ self.verbose = verbose
+ self.user_env = env
+
+ self.src_dir = Path(self.env.get_source_dir(), self.subdir)
+ self.build_dir = Path(self.env.get_build_dir(), self.subdir, 'build')
+ self.install_dir = Path(self.env.get_build_dir(), self.subdir, 'dist')
+ self.prefix = Path(self.env.coredata.get_option(OptionKey('prefix')))
+ self.libdir = Path(self.env.coredata.get_option(OptionKey('libdir')))
+ self.includedir = Path(self.env.coredata.get_option(OptionKey('includedir')))
+ self.name = self.src_dir.name
+
+ # On Windows if the prefix is "c:/foo" and DESTDIR is "c:/bar", `make`
+ # will install files into "c:/bar/c:/foo" which is an invalid path.
+ # Work around that issue by removing the drive from prefix.
+ if self.prefix.drive:
+ self.prefix = self.prefix.relative_to(self.prefix.drive)
+
+ # self.prefix is an absolute path, so we cannot append it to another path.
+ self.rel_prefix = self.prefix.relative_to(self.prefix.root)
+
+ self.make = state.find_program('make')
+ self.make = self.make.get_command()[0]
+
+ self._configure(state)
+
+ self.targets = self._create_targets()
+
+ def _configure(self, state: ModuleState):
+        # Assume it's the name of a script in the source dir, like
+        # 'configure' or 'autogen.sh'.
+ configure_path = Path(self.src_dir, self.configure_command)
+ configure_prog = state.find_program(configure_path.as_posix())
+ configure_cmd = configure_prog.get_command()
+
+ d = [('PREFIX', '--prefix=@PREFIX@', self.prefix.as_posix()),
+ ('LIBDIR', '--libdir=@PREFIX@/@LIBDIR@', self.libdir.as_posix()),
+ ('INCLUDEDIR', None, self.includedir.as_posix()),
+ ]
+ self._validate_configure_options(d)
+
+ configure_cmd += self._format_options(self.configure_options, d)
+
+ if self.env.is_cross_build():
+ host = '{}-{}-{}'.format(self.host_machine.cpu_family,
+ self.build_machine.system,
+ self.host_machine.system)
+ d = [('HOST', None, host)]
+ configure_cmd += self._format_options(self.cross_configure_options, d)
+
+ # Set common env variables like CFLAGS, CC, etc.
+ link_exelist = []
+ link_args = []
+ self.run_env = os.environ.copy()
+ for lang, compiler in self.env.coredata.compilers[MachineChoice.HOST].items():
+ if any(lang not in i for i in (CEXE_MAPPING, CFLAGS_MAPPING)):
+ continue
+ cargs = self.env.coredata.get_external_args(MachineChoice.HOST, lang)
+ self.run_env[CEXE_MAPPING[lang]] = self._quote_and_join(compiler.get_exelist())
+ self.run_env[CFLAGS_MAPPING[lang]] = self._quote_and_join(cargs)
+ if not link_exelist:
+ link_exelist = compiler.get_linker_exelist()
+ link_args = self.env.coredata.get_external_link_args(MachineChoice.HOST, lang)
+ if link_exelist:
+ # FIXME: Do not pass linker because Meson uses CC as linker wrapper,
+            # but autotools often expects the real linker (e.g. GNU ld).
+ # self.run_env['LD'] = self._quote_and_join(link_exelist)
+ pass
+ self.run_env['LDFLAGS'] = self._quote_and_join(link_args)
+
+ self.run_env = self.user_env.get_env(self.run_env)
+
+ PkgConfigDependency.setup_env(self.run_env, self.env, MachineChoice.HOST,
+ Path(self.env.get_build_dir(), 'meson-uninstalled').as_posix())
+
+ self.build_dir.mkdir(parents=True, exist_ok=True)
+ self._run('configure', configure_cmd)
+
+ def _quote_and_join(self, array: T.List[str]) -> str:
+ return ' '.join([shlex.quote(i) for i in array])
+
+ def _validate_configure_options(self, variables: T.List[T.Tuple[str, str, str]]):
+        # Ensure the user at least tries to pass basic info to the build
+        # system, like the prefix, libdir, etc.
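+        # For instance, if none of the configure options mention '@PREFIX@',
+        # the default '--prefix=@PREFIX@' template is appended below.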
+ for key, default, val in variables:
+ if default is None:
+ continue
+ key_format = f'@{key}@'
+ for option in self.configure_options:
+ if key_format in option:
+ break
+ else:
+ FeatureNew('Default configure_option', '0.57.0').use(self.subproject)
+ self.configure_options.append(default)
+
+ def _format_options(self, options: T.List[str], variables: T.List[T.Tuple[str, str, str]]) -> T.List[str]:
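+        # Substitute @VAR@ placeholders in each option; e.g. (illustrative)
+        # '--prefix=@PREFIX@' becomes '--prefix=/usr/local' when PREFIX is
+        # '/usr/local'.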
+ out = []
+ missing = set()
+ regex = get_variable_regex('meson')
+ confdata = {k: (v, None) for k, d, v in variables}
+ for o in options:
+ arg, missing_vars = do_replacement(regex, o, 'meson', confdata)
+ missing.update(missing_vars)
+ out.append(arg)
+ if missing:
+ var_list = ", ".join(map(repr, sorted(missing)))
+ raise EnvironmentException(
+ f"Variables {var_list} in configure options are missing.")
+ return out
+
+ def _run(self, step: str, command: T.List[str]):
+ mlog.log(f'External project {self.name}:', mlog.bold(step))
+ m = 'Running command ' + str(command) + ' in directory ' + str(self.build_dir) + '\n'
+ log_filename = Path(mlog.log_dir, f'{self.name}-{step}.log')
+ output = None
+ if not self.verbose:
+ output = open(log_filename, 'w', encoding='utf-8')
+ output.write(m + '\n')
+ output.flush()
+ else:
+ mlog.log(m)
+ p, o, e = Popen_safe(command, cwd=str(self.build_dir), env=self.run_env,
+ stderr=subprocess.STDOUT,
+ stdout=output)
+ if p.returncode != 0:
+ m = f'{step} step returned error code {p.returncode}.'
+ if not self.verbose:
+ m += '\nSee logs: ' + str(log_filename)
+ raise MesonException(m)
+
+ def _create_targets(self):
+ cmd = self.env.get_build_command()
+ cmd += ['--internal', 'externalproject',
+ '--name', self.name,
+ '--srcdir', self.src_dir.as_posix(),
+ '--builddir', self.build_dir.as_posix(),
+ '--installdir', self.install_dir.as_posix(),
+ '--logdir', mlog.log_dir,
+ '--make', self.make,
+ ]
+ if self.verbose:
+ cmd.append('--verbose')
+
+ target_kwargs = {'output': f'{self.name}.stamp',
+ 'depfile': f'{self.name}.d',
+ 'command': cmd + ['@OUTPUT@', '@DEPFILE@'],
+ 'console': True,
+ }
+ self.target = build.CustomTarget(self.name,
+ self.subdir.as_posix(),
+ self.subproject,
+ target_kwargs)
+
+ idir = build.InstallDir(self.subdir.as_posix(),
+ Path('dist', self.rel_prefix).as_posix(),
+ install_dir='.',
+ install_mode=None,
+ exclude=None,
+ strip_directory=True,
+ from_source_dir=False,
+ subproject=self.subproject)
+
+ return [self.target, idir]
+
+ @permittedKwargs({'subdir'})
+ @typed_pos_args('external_project.dependency', str)
+ def dependency_method(self, state, args: T.Tuple[str], kwargs):
+ libname = args[0]
+
+ subdir = kwargs.get('subdir', '')
+ if not isinstance(subdir, str):
+            m = 'ExternalProject.dependency subdir keyword argument must be a string.'
+ raise InterpreterException(m)
+
+ abs_includedir = Path(self.install_dir, self.rel_prefix, self.includedir)
+ if subdir:
+ abs_includedir = Path(abs_includedir, subdir)
+ abs_libdir = Path(self.install_dir, self.rel_prefix, self.libdir)
+
+ version = self.project_version['version']
+ incdir = []
+ compile_args = [f'-I{abs_includedir}']
+ link_args = [f'-L{abs_libdir}', f'-l{libname}']
+ libs = []
+ libs_whole = []
+ sources = self.target
+ final_deps = []
+ variables = {}
+ dep = InternalDependency(version, incdir, compile_args, link_args, libs,
+ libs_whole, sources, final_deps, variables)
+ return dep
+
+
+class ExternalProjectModule(ExtensionModule):
+ @FeatureNew('External build system Module', '0.56.0')
+ def __init__(self, interpreter):
+ super().__init__(interpreter)
+ self.methods.update({'add_project': self.add_project,
+ })
+
+ @permittedKwargs({'configure_options', 'cross_configure_options', 'verbose', 'env'})
+ @typed_pos_args('external_project_mod.add_project', str)
+ def add_project(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]):
+ configure_command = args[0]
+ configure_options = extract_as_list(kwargs, 'configure_options')
+ cross_configure_options = extract_as_list(kwargs, 'cross_configure_options')
+ if not cross_configure_options:
+ cross_configure_options = ['--host=@HOST@']
+ verbose = kwargs.get('verbose', False)
+ env = self.interpreter.unpack_env_kwarg(kwargs)
+ project = ExternalProject(state,
+ configure_command,
+ configure_options,
+ cross_configure_options,
+ env, verbose)
+ return ModuleReturnValue(project, project.targets)
+
+
+def initialize(*args, **kwargs):
+ return ExternalProjectModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/unstable_icestorm.py b/meson/mesonbuild/modules/unstable_icestorm.py
new file mode 100644
index 000000000..841e647e4
--- /dev/null
+++ b/meson/mesonbuild/modules/unstable_icestorm.py
@@ -0,0 +1,89 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mesonlib
+from ..interpreterbase import flatten
+from ..interpreterbase import FeatureNew
+
+from . import ExtensionModule
+
+class IceStormModule(ExtensionModule):
+
+ @FeatureNew('FPGA/Icestorm Module', '0.45.0')
+ def __init__(self, interpreter):
+ super().__init__(interpreter)
+ self.yosys_bin = None
+ self.methods.update({
+ 'project': self.project,
+ })
+
+ def detect_binaries(self, state):
+ self.yosys_bin = state.find_program('yosys')
+ self.arachne_bin = state.find_program('arachne-pnr')
+ self.icepack_bin = state.find_program('icepack')
+ self.iceprog_bin = state.find_program('iceprog')
+ self.icetime_bin = state.find_program('icetime')
+
+ def project(self, state, args, kwargs):
+ if not self.yosys_bin:
+ self.detect_binaries(state)
+ if not args:
+ raise mesonlib.MesonException('Project requires at least one argument, which is the project name.')
+ proj_name = args[0]
+ arg_sources = args[1:]
+ if not isinstance(proj_name, str):
+ raise mesonlib.MesonException('Argument must be a string.')
+ kwarg_sources = kwargs.get('sources', [])
+ if not isinstance(kwarg_sources, list):
+ kwarg_sources = [kwarg_sources]
+ all_sources = self.interpreter.source_strings_to_files(flatten(arg_sources + kwarg_sources))
+ if 'constraint_file' not in kwargs:
+ raise mesonlib.MesonException('Constraint file not specified.')
+
+ constraint_file = self.interpreter.source_strings_to_files(kwargs['constraint_file'])
+ if len(constraint_file) != 1:
+ raise mesonlib.MesonException('Constraint file must contain one and only one entry.')
+ blif_name = proj_name + '_blif'
+ blif_fname = proj_name + '.blif'
+ asc_name = proj_name + '_asc'
+ asc_fname = proj_name + '.asc'
+ bin_name = proj_name + '_bin'
+ bin_fname = proj_name + '.bin'
+ time_name = proj_name + '-time'
+ upload_name = proj_name + '-upload'
+
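+        # Pipeline wired below: yosys synthesizes the sources to BLIF,
+        # arachne-pnr places and routes the BLIF into an ASC file, icepack
+        # packs the ASC into a BIN, and iceprog/icetime are exposed as
+        # upload/timing run targets.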
+ blif_target = self.interpreter.func_custom_target(None, [blif_name], {
+ 'input': all_sources,
+ 'output': blif_fname,
+ 'command': [self.yosys_bin, '-q', '-p', 'synth_ice40 -blif @OUTPUT@', '@INPUT@']})
+
+ asc_target = self.interpreter.func_custom_target(None, [asc_name], {
+ 'input': blif_target,
+ 'output': asc_fname,
+ 'command': [self.arachne_bin, '-q', '-d', '1k', '-p', constraint_file, '@INPUT@', '-o', '@OUTPUT@']})
+
+ bin_target = self.interpreter.func_custom_target(None, [bin_name], {
+ 'input': asc_target,
+ 'output': bin_fname,
+ 'command': [self.icepack_bin, '@INPUT@', '@OUTPUT@'],
+ 'build_by_default': True})
+
+ self.interpreter.func_run_target(None, [upload_name], {
+ 'command': [self.iceprog_bin, bin_target]})
+
+ self.interpreter.func_run_target(None, [time_name], {
+ 'command': [self.icetime_bin, bin_target]})
+
+def initialize(*args, **kwargs):
+ return IceStormModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/unstable_rust.py b/meson/mesonbuild/modules/unstable_rust.py
new file mode 100644
index 000000000..995370a7d
--- /dev/null
+++ b/meson/mesonbuild/modules/unstable_rust.py
@@ -0,0 +1,227 @@
+# Copyright © 2020 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue
+from .. import mlog
+from ..build import BuildTarget, CustomTargetIndex, Executable, GeneratedList, InvalidArguments, IncludeDirs, CustomTarget
+from ..interpreter.interpreter import TEST_KWARGS
+from ..interpreterbase import ContainerTypeInfo, InterpreterException, KwargInfo, permittedKwargs, FeatureNew, typed_kwargs, typed_pos_args, noPosargs
+from ..mesonlib import stringlistify, listify, typeslistify, File
+from ..dependencies import Dependency, ExternalLibrary
+
+if T.TYPE_CHECKING:
+ from . import ModuleState
+ from ..interpreter import Interpreter
+ from ..interpreter import kwargs as _kwargs
+ from ..interpreter.interpreter import SourceOutputs
+ from ..programs import ExternalProgram
+
+ class FuncTest(_kwargs.BaseTest):
+
+ dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
+ is_parallel: bool
+
+
+class RustModule(ExtensionModule):
+
+ """A module that holds helper functions for rust."""
+
+ @FeatureNew('rust module', '0.57.0')
+ def __init__(self, interpreter: 'Interpreter') -> None:
+ super().__init__(interpreter)
+ self._bindgen_bin: T.Optional['ExternalProgram'] = None
+ self.methods.update({
+ 'test': self.test,
+ 'bindgen': self.bindgen,
+ })
+
+ @typed_pos_args('rust.test', str, BuildTarget)
+ @typed_kwargs(
+ 'rust.test',
+ *TEST_KWARGS,
+ KwargInfo('is_parallel', bool, default=False),
+ KwargInfo(
+ 'dependencies',
+ ContainerTypeInfo(list, (Dependency, ExternalLibrary)),
+ listify=True,
+ default=[]),
+ )
+ def test(self, state: 'ModuleState', args: T.Tuple[str, BuildTarget], kwargs: 'FuncTest') -> ModuleReturnValue:
+ """Generate a rust test target from a given rust target.
+
+        Rust puts its unit tests inside its main source files, unlike most
+ languages that put them in external files. This means that normally
+ you have to define two separate targets with basically the same
+ arguments to get tests:
+
+ ```meson
+ rust_lib_sources = [...]
+ rust_lib = static_library(
+ 'rust_lib',
+ rust_lib_sources,
+ )
+
+ rust_lib_test = executable(
+ 'rust_lib_test',
+ rust_lib_sources,
+ rust_args : ['--test'],
+ )
+
+ test(
+ 'rust_lib_test',
+ rust_lib_test,
+ protocol : 'rust',
+ )
+ ```
+
+ This is all fine, but not very DRY. This method makes it much easier
+ to define rust tests:
+
+ ```meson
+ rust = import('unstable-rust')
+
+ rust_lib = static_library(
+ 'rust_lib',
+ [sources],
+ )
+
+ rust.test('rust_lib_test', rust_lib)
+ ```
+ """
+ name = args[0]
+ base_target: BuildTarget = args[1]
+ if not base_target.uses_rust():
+            raise InterpreterException('Second positional argument to rustmod.test() must be a Rust-based target')
+ extra_args = kwargs['args']
+
+ # Delete any arguments we don't want passed
+ if '--test' in extra_args:
+ mlog.warning('Do not add --test to rustmod.test arguments')
+ extra_args.remove('--test')
+ if '--format' in extra_args:
+ mlog.warning('Do not add --format to rustmod.test arguments')
+ i = extra_args.index('--format')
+ # Also delete the argument to --format
+ del extra_args[i + 1]
+ del extra_args[i]
+ for i, a in enumerate(extra_args):
+ if isinstance(a, str) and a.startswith('--format='):
+ del extra_args[i]
+ break
+
+ dependencies = [d for d in kwargs['dependencies']]
+
+ # We need to cast here, as currently these don't have protocol in them, but test itself does.
+ tkwargs = T.cast('_kwargs.FuncTest', kwargs.copy())
+
+ tkwargs['args'] = extra_args + ['--test', '--format', 'pretty']
+ tkwargs['protocol'] = 'rust'
+
+ new_target_kwargs = base_target.kwargs.copy()
+        # Don't mutate the shallow-copied list; instead replace it with a new
+        # one.
+ new_target_kwargs['rust_args'] = new_target_kwargs.get('rust_args', []) + ['--test']
+ new_target_kwargs['install'] = False
+ new_target_kwargs['dependencies'] = new_target_kwargs.get('dependencies', []) + dependencies
+
+ new_target = Executable(
+ name, base_target.subdir, state.subproject,
+ base_target.for_machine, base_target.sources,
+ base_target.objects, base_target.environment,
+ new_target_kwargs
+ )
+
+ test = self.interpreter.make_test(
+ self.interpreter.current_node, (name, new_target), tkwargs)
+
+ return ModuleReturnValue(None, [new_target, test])
+
+ @noPosargs
+ @permittedKwargs({'input', 'output', 'include_directories', 'c_args', 'args'})
+ def bindgen(self, state: 'ModuleState', args: T.List, kwargs: T.Dict[str, T.Any]) -> ModuleReturnValue:
+ """Wrapper around bindgen to simplify it's use.
+
+ The main thing this simplifies is the use of `include_directory`
+ objects, instead of having to pass a plethora of `-I` arguments.
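+
+        A typical call might look like this (an illustrative sketch; the header
+        path, output name, and `inc` variable are assumptions):
+
+        ```meson
+        rust = import('unstable-rust')
+        generated = rust.bindgen(
+            input : 'src/header.h',
+            output : 'bindings.rs',
+            include_directories : inc,
+        )
+        ```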
+ """
+ header: 'SourceOutputs'
+ _deps: T.Sequence['SourceOutputs']
+ try:
+ header, *_deps = self.interpreter.source_strings_to_files(listify(kwargs['input']))
+ except KeyError:
+ raise InvalidArguments('rustmod.bindgen() `input` argument must have at least one element.')
+
+ try:
+ output: str = kwargs['output']
+ except KeyError:
+ raise InvalidArguments('rustmod.bindgen() `output` must be provided')
+ if not isinstance(output, str):
+ raise InvalidArguments('rustmod.bindgen() `output` argument must be a string.')
+
+ include_dirs: T.List[IncludeDirs] = typeslistify(listify(kwargs.get('include_directories', [])), IncludeDirs)
+ c_args: T.List[str] = stringlistify(listify(kwargs.get('c_args', [])))
+ bind_args: T.List[str] = stringlistify(listify(kwargs.get('args', [])))
+
+        # Split File and Target dependencies to pass to CustomTarget
+ depends: T.List[T.Union[GeneratedList, BuildTarget, CustomTargetIndex, CustomTarget]] = []
+ depend_files: T.List[File] = []
+ for d in _deps:
+ if isinstance(d, File):
+ depend_files.append(d)
+ else:
+ depends.append(d)
+
+ inc_strs: T.List[str] = []
+ for i in include_dirs:
+ # bindgen always uses clang, so it's safe to hardcode -I here
+ inc_strs.extend([f'-I{x}' for x in i.to_string_list(state.environment.get_source_dir())])
+
+ if self._bindgen_bin is None:
+ self._bindgen_bin = state.find_program('bindgen')
+
+ name: str
+ if isinstance(header, File):
+ name = header.fname
+ else:
+ name = header.get_outputs()[0]
+
+ target = CustomTarget(
+ f'rustmod-bindgen-{name}'.replace('/', '_'),
+ state.subdir,
+ state.subproject,
+ {
+ 'input': header,
+ 'output': output,
+ 'command': self._bindgen_bin.get_command() + [
+ '@INPUT@', '--output',
+ os.path.join(state.environment.build_dir, '@OUTPUT@')] +
+ bind_args + ['--'] + c_args + inc_strs +
+ ['-MD', '-MQ', '@INPUT@', '-MF', '@DEPFILE@'],
+ 'depfile': '@PLAINNAME@.d',
+ 'depends': depends,
+ 'depend_files': depend_files,
+ },
+ backend=state.backend,
+ )
+
+ return ModuleReturnValue([target], [target])
+
+
+def initialize(*args: T.List, **kwargs: T.Dict) -> RustModule:
+ return RustModule(*args, **kwargs) # type: ignore
diff --git a/meson/mesonbuild/modules/unstable_simd.py b/meson/mesonbuild/modules/unstable_simd.py
new file mode 100644
index 000000000..3339cea5a
--- /dev/null
+++ b/meson/mesonbuild/modules/unstable_simd.py
@@ -0,0 +1,88 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mesonlib, compilers, mlog
+
+from . import ExtensionModule
+
+from ..interpreterbase import FeatureNew
+
+class SimdModule(ExtensionModule):
+
+ @FeatureNew('SIMD module', '0.42.0')
+ def __init__(self, interpreter):
+ super().__init__(interpreter)
+ # FIXME add Altivec and AVX512.
+ self.isets = ('mmx',
+ 'sse',
+ 'sse2',
+ 'sse3',
+ 'ssse3',
+ 'sse41',
+ 'sse42',
+ 'avx',
+ 'avx2',
+ 'neon',
+ )
+ self.methods.update({
+ 'check': self.check,
+ })
+
+ def check(self, state, args, kwargs):
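+        # For each requested instruction set, compile the given sources into a
+        # separate static library with the matching compiler flags and record a
+        # 'HAVE_' + ISET entry in the returned configuration data.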
+ result = []
+ if len(args) != 1:
+ raise mesonlib.MesonException('Check requires one argument, a name prefix for checks.')
+ prefix = args[0]
+ if not isinstance(prefix, str):
+ raise mesonlib.MesonException('Argument must be a string.')
+ if 'compiler' not in kwargs:
+ raise mesonlib.MesonException('Must specify compiler keyword')
+ if 'sources' in kwargs:
+ raise mesonlib.MesonException('SIMD module does not support the "sources" keyword')
+ basic_kwargs = {}
+ for key, value in kwargs.items():
+ if key not in self.isets and key != 'compiler':
+ basic_kwargs[key] = value
+ compiler = kwargs['compiler']
+ if not isinstance(compiler, compilers.compilers.Compiler):
+ raise mesonlib.MesonException('Compiler argument must be a compiler object.')
+ cdata = self.interpreter.func_configuration_data(None, [], {})
+ conf = cdata.conf_data
+ for iset in self.isets:
+ if iset not in kwargs:
+ continue
+ iset_fname = kwargs[iset] # Might also be an array or Files. static_library will validate.
+ args = compiler.get_instruction_set_args(iset)
+ if args is None:
+ mlog.log('Compiler supports %s:' % iset, mlog.red('NO'))
+ continue
+ if args:
+ if not compiler.has_multi_arguments(args, state.environment)[0]:
+ mlog.log('Compiler supports %s:' % iset, mlog.red('NO'))
+ continue
+ mlog.log('Compiler supports %s:' % iset, mlog.green('YES'))
+ conf.values['HAVE_' + iset.upper()] = ('1', 'Compiler supports %s.' % iset)
+ libname = prefix + '_' + iset
+ lib_kwargs = {'sources': iset_fname,
+ }
+ lib_kwargs.update(basic_kwargs)
+ langarg_key = compiler.get_language() + '_args'
+ old_lang_args = mesonlib.extract_as_list(lib_kwargs, langarg_key)
+ all_lang_args = old_lang_args + args
+ lib_kwargs[langarg_key] = all_lang_args
+ result.append(self.interpreter.func_static_lib(None, [libname], lib_kwargs))
+ return [result, cdata]
+
+def initialize(*args, **kwargs):
+ return SimdModule(*args, **kwargs)
diff --git a/meson/mesonbuild/modules/windows.py b/meson/mesonbuild/modules/windows.py
new file mode 100644
index 000000000..7f627cff1
--- /dev/null
+++ b/meson/mesonbuild/modules/windows.py
@@ -0,0 +1,171 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import enum
+import os
+import re
+
+from .. import mlog
+from .. import mesonlib, build
+from ..mesonlib import MachineChoice, MesonException, extract_as_list
+from . import ModuleReturnValue
+from . import ExtensionModule
+from ..interpreterbase import permittedKwargs, FeatureNewKwargs, flatten
+from ..programs import ExternalProgram
+
+class ResourceCompilerType(enum.Enum):
+ windres = 1
+ rc = 2
+ wrc = 3
+
+class WindowsModule(ExtensionModule):
+ def __init__(self, interpreter):
+ super().__init__(interpreter)
+ self.methods.update({
+ 'compile_resources': self.compile_resources,
+ })
+
+ def detect_compiler(self, compilers):
+ for l in ('c', 'cpp'):
+ if l in compilers:
+ return compilers[l]
+ raise MesonException('Resource compilation requires a C or C++ compiler.')
+
+ def _find_resource_compiler(self, state):
+        # FIXME: Does not handle `native: true` executables; see
+        # https://github.com/mesonbuild/meson/issues/1531
+        # Take a parameter instead of the hardcoded definition below.
+ for_machine = MachineChoice.HOST
+
+ if hasattr(self, '_rescomp'):
+ return self._rescomp
+
+ # Will try cross / native file and then env var
+ rescomp = ExternalProgram.from_bin_list(state.environment, for_machine, 'windres')
+
+ if not rescomp or not rescomp.found():
+ comp = self.detect_compiler(state.environment.coredata.compilers[for_machine])
+ if comp.id in {'msvc', 'clang-cl', 'intel-cl'}:
+ rescomp = ExternalProgram('rc', silent=True)
+ else:
+ rescomp = ExternalProgram('windres', silent=True)
+
+ if not rescomp.found():
+ raise MesonException('Could not find Windows resource compiler')
+
+ for (arg, match, rc_type) in [
+ ('/?', '^.*Microsoft.*Resource Compiler.*$', ResourceCompilerType.rc),
+ ('--version', '^.*GNU windres.*$', ResourceCompilerType.windres),
+ ('--version', '^.*Wine Resource Compiler.*$', ResourceCompilerType.wrc),
+ ]:
+ p, o, e = mesonlib.Popen_safe(rescomp.get_command() + [arg])
+ m = re.search(match, o, re.MULTILINE)
+ if m:
+ mlog.log('Windows resource compiler: %s' % m.group())
+ self._rescomp = (rescomp, rc_type)
+ break
+ else:
+ raise MesonException('Could not determine type of Windows resource compiler')
+
+ return self._rescomp
+
+ @FeatureNewKwargs('windows.compile_resources', '0.47.0', ['depend_files', 'depends'])
+ @permittedKwargs({'args', 'include_directories', 'depend_files', 'depends'})
+ def compile_resources(self, state, args, kwargs):
+ extra_args = mesonlib.stringlistify(flatten(kwargs.get('args', [])))
+ wrc_depend_files = extract_as_list(kwargs, 'depend_files', pop = True)
+ wrc_depends = extract_as_list(kwargs, 'depends', pop = True)
+ for d in wrc_depends:
+ if isinstance(d, build.CustomTarget):
+ extra_args += state.get_include_args([
+ build.IncludeDirs('', [], False, [os.path.join('@BUILD_ROOT@', self.interpreter.backend.get_target_dir(d))])
+ ])
+ inc_dirs = extract_as_list(kwargs, 'include_directories', pop = True)
+ for incd in inc_dirs:
+ if not isinstance(incd, (str, build.IncludeDirs)):
+ raise MesonException('Resource include dirs should be include_directories().')
+ extra_args += state.get_include_args(inc_dirs)
+
+ rescomp, rescomp_type = self._find_resource_compiler(state)
+ if rescomp_type == ResourceCompilerType.rc:
+ # RC is used to generate .res files, a special binary resource
+ # format, which can be passed directly to LINK (apparently LINK uses
+ # CVTRES internally to convert this to a COFF object)
+ suffix = 'res'
+ res_args = extra_args + ['/nologo', '/fo@OUTPUT@', '@INPUT@']
+ elif rescomp_type == ResourceCompilerType.windres:
+ # ld only supports object files, so windres is used to generate a
+ # COFF object
+ suffix = 'o'
+ res_args = extra_args + ['@INPUT@', '@OUTPUT@']
+
+ m = 'Argument {!r} has a space which may not work with windres due to ' \
+ 'a MinGW bug: https://sourceware.org/bugzilla/show_bug.cgi?id=4933'
+ for arg in extra_args:
+ if ' ' in arg:
+ mlog.warning(m.format(arg), fatal=False)
+ else:
+ suffix = 'o'
+ res_args = extra_args + ['@INPUT@', '-o', '@OUTPUT@']
+
+ res_targets = []
+
+ def add_target(src):
+ if isinstance(src, list):
+ for subsrc in src:
+ add_target(subsrc)
+ return
+
+ if isinstance(src, str):
+ name_formatted = src
+ name = os.path.join(state.subdir, src)
+ elif isinstance(src, mesonlib.File):
+ name_formatted = src.fname
+ name = src.relative_name()
+ elif isinstance(src, build.CustomTarget):
+ if len(src.get_outputs()) > 1:
+ raise MesonException('windows.compile_resources does not accept custom targets with more than 1 output.')
+
+                # Chances are that src.get_filename() is already the name of that
+                # target; add a prefix to avoid a name clash.
+ name_formatted = 'windows_compile_resources_' + src.get_filename()
+ name = src.get_id()
+ else:
+ raise MesonException(f'Unexpected source type {src!r}. windows.compile_resources accepts only strings, files, custom targets, and lists thereof.')
+
+ # Path separators are not allowed in target names
+ name = name.replace('/', '_').replace('\\', '_')
+ name_formatted = name_formatted.replace('/', '_').replace('\\', '_')
+
+ res_kwargs = {
+ 'output': name + '_@BASENAME@.' + suffix,
+ 'input': [src],
+ 'command': [rescomp] + res_args,
+ 'depend_files': wrc_depend_files,
+ 'depends': wrc_depends,
+ }
+
+ # instruct binutils windres to generate a preprocessor depfile
+ if rescomp_type == ResourceCompilerType.windres:
+ res_kwargs['depfile'] = res_kwargs['output'] + '.d'
+ res_kwargs['command'] += ['--preprocessor-arg=-MD', '--preprocessor-arg=-MQ@OUTPUT@', '--preprocessor-arg=-MF@DEPFILE@']
+
+ res_targets.append(build.CustomTarget(name_formatted, state.subdir, state.subproject, res_kwargs))
+
+ add_target(args)
+
+ return ModuleReturnValue(res_targets, [res_targets])
+
+def initialize(*args, **kwargs):
+ return WindowsModule(*args, **kwargs)
diff --git a/meson/mesonbuild/mparser.py b/meson/mesonbuild/mparser.py
new file mode 100644
index 000000000..10796827c
--- /dev/null
+++ b/meson/mesonbuild/mparser.py
@@ -0,0 +1,814 @@
+# Copyright 2014-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+import codecs
+import textwrap
+import types
+import typing as T
+from .mesonlib import MesonException
+from . import mlog
+
+if T.TYPE_CHECKING:
+ from .ast import AstVisitor
+
+# This is the regex for the supported escape sequences of a regular string
+# literal, like 'abc\x00'
+ESCAPE_SEQUENCE_SINGLE_RE = re.compile(r'''
+ ( \\U[A-Fa-f0-9]{8} # 8-digit hex escapes
+ | \\u[A-Fa-f0-9]{4} # 4-digit hex escapes
+ | \\x[A-Fa-f0-9]{2} # 2-digit hex escapes
+ | \\[0-7]{1,3} # Octal escapes
+ | \\N\{[^}]+\} # Unicode characters by name
+ | \\[\\'abfnrtv] # Single-character escapes
+ )''', re.UNICODE | re.VERBOSE)
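+# For example (illustrative), in the literal 'abc\x41\n' this regex matches the
+# '\x41' and '\n' escapes, which decode_match() below turns into 'A' and a newline.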
+
+class MesonUnicodeDecodeError(MesonException):
+ def __init__(self, match: str) -> None:
+ super().__init__(match)
+ self.match = match
+
+def decode_match(match: T.Match[str]) -> str:
+ try:
+ return codecs.decode(match.group(0).encode(), 'unicode_escape')
+ except UnicodeDecodeError:
+ raise MesonUnicodeDecodeError(match.group(0))
+
+class ParseException(MesonException):
+ def __init__(self, text: str, line: str, lineno: int, colno: int) -> None:
+ # Format as error message, followed by the line with the error, followed by a caret to show the error column.
+ super().__init__("{}\n{}\n{}".format(text, line, '{}^'.format(' ' * colno)))
+ self.lineno = lineno
+ self.colno = colno
+
+class BlockParseException(MesonException):
+ def __init__(
+ self,
+ text: str,
+ line: str,
+ lineno: int,
+ colno: int,
+ start_line: str,
+ start_lineno: int,
+ start_colno: int,
+ ) -> None:
+ # This can be formatted in two ways - one if the block start and end are on the same line, and a different way if they are on different lines.
+
+ if lineno == start_lineno:
+ # If block start and end are on the same line, it is formatted as:
+ # Error message
+ # Followed by the line with the error
+ # Followed by a caret to show the block start
+ # Followed by underscores
+ # Followed by a caret to show the block end.
+ super().__init__("{}\n{}\n{}".format(text, line, '{}^{}^'.format(' ' * start_colno, '_' * (colno - start_colno - 1))))
+ else:
+ # If block start and end are on different lines, it is formatted as:
+ # Error message
+ # Followed by the line with the error
+ # Followed by a caret to show the error column.
+ # Followed by a message saying where the block started.
+ # Followed by the line of the block start.
+ # Followed by a caret for the block start.
+ super().__init__("%s\n%s\n%s\nFor a block that started at %d,%d\n%s\n%s" % (text, line, '%s^' % (' ' * colno), start_lineno, start_colno, start_line, "%s^" % (' ' * start_colno)))
+ self.lineno = lineno
+ self.colno = colno
+
+TV_TokenTypes = T.TypeVar('TV_TokenTypes', int, str, bool)
+
+class Token(T.Generic[TV_TokenTypes]):
+ def __init__(self, tid: str, filename: str, line_start: int, lineno: int, colno: int, bytespan: T.Tuple[int, int], value: TV_TokenTypes):
+ self.tid = tid # type: str
+ self.filename = filename # type: str
+ self.line_start = line_start # type: int
+ self.lineno = lineno # type: int
+ self.colno = colno # type: int
+ self.bytespan = bytespan # type: T.Tuple[int, int]
+ self.value = value # type: TV_TokenTypes
+
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, str):
+ return self.tid == other
+ elif isinstance(other, Token):
+ return self.tid == other.tid
+ return NotImplemented
+
+class Lexer:
+ def __init__(self, code: str):
+ self.code = code
+ self.keywords = {'true', 'false', 'if', 'else', 'elif',
+ 'endif', 'and', 'or', 'not', 'foreach', 'endforeach',
+ 'in', 'continue', 'break'}
+ self.future_keywords = {'return'}
+ self.token_specification = [
+ # Need to be sorted longest to shortest.
+ ('ignore', re.compile(r'[ \t]')),
+ ('fstring', re.compile(r"f'([^'\\]|(\\.))*'")),
+ ('id', re.compile('[_a-zA-Z][_0-9a-zA-Z]*')),
+ ('number', re.compile(r'0[bB][01]+|0[oO][0-7]+|0[xX][0-9a-fA-F]+|0|[1-9]\d*')),
+ ('eol_cont', re.compile(r'\\\n')),
+ ('eol', re.compile(r'\n')),
+ ('multiline_string', re.compile(r"'''(.|\n)*?'''", re.M)),
+ ('comment', re.compile(r'#.*')),
+ ('lparen', re.compile(r'\(')),
+ ('rparen', re.compile(r'\)')),
+ ('lbracket', re.compile(r'\[')),
+ ('rbracket', re.compile(r'\]')),
+ ('lcurl', re.compile(r'\{')),
+ ('rcurl', re.compile(r'\}')),
+ ('dblquote', re.compile(r'"')),
+ ('string', re.compile(r"'([^'\\]|(\\.))*'")),
+ ('comma', re.compile(r',')),
+ ('plusassign', re.compile(r'\+=')),
+ ('dot', re.compile(r'\.')),
+ ('plus', re.compile(r'\+')),
+ ('dash', re.compile(r'-')),
+ ('star', re.compile(r'\*')),
+ ('percent', re.compile(r'%')),
+ ('fslash', re.compile(r'/')),
+ ('colon', re.compile(r':')),
+ ('equal', re.compile(r'==')),
+ ('nequal', re.compile(r'!=')),
+ ('assign', re.compile(r'=')),
+ ('le', re.compile(r'<=')),
+ ('lt', re.compile(r'<')),
+ ('ge', re.compile(r'>=')),
+ ('gt', re.compile(r'>')),
+ ('questionmark', re.compile(r'\?')),
+ ]
+
+ def getline(self, line_start: int) -> str:
+ return self.code[line_start:self.code.find('\n', line_start)]
+
+ def lex(self, filename: str) -> T.Generator[Token, None, None]:
+ line_start = 0
+ lineno = 1
+ loc = 0
+ par_count = 0
+ bracket_count = 0
+ curl_count = 0
+ col = 0
+ while loc < len(self.code):
+ matched = False
+ value = None # type: T.Union[str, bool, int]
+ for (tid, reg) in self.token_specification:
+ mo = reg.match(self.code, loc)
+ if mo:
+ curline = lineno
+ curline_start = line_start
+ col = mo.start() - line_start
+ matched = True
+ span_start = loc
+ loc = mo.end()
+ span_end = loc
+ bytespan = (span_start, span_end)
+ match_text = mo.group()
+ if tid == 'ignore' or tid == 'comment':
+ break
+ elif tid == 'lparen':
+ par_count += 1
+ elif tid == 'rparen':
+ par_count -= 1
+ elif tid == 'lbracket':
+ bracket_count += 1
+ elif tid == 'rbracket':
+ bracket_count -= 1
+ elif tid == 'lcurl':
+ curl_count += 1
+ elif tid == 'rcurl':
+ curl_count -= 1
+ elif tid == 'dblquote':
+ raise ParseException('Double quotes are not supported. Use single quotes.', self.getline(line_start), lineno, col)
+ elif tid in {'string', 'fstring'}:
+ # Handle here and not on the regexp to give a better error message.
+ if match_text.find("\n") != -1:
+ mlog.warning(textwrap.dedent("""\
+ Newline character in a string detected, use ''' (three single quotes) for multiline strings instead.
+ This will become a hard error in a future Meson release.\
+ """),
+ self.getline(line_start),
+ str(lineno),
+ str(col)
+ )
+ value = match_text[2 if tid == 'fstring' else 1:-1]
+ try:
+ value = ESCAPE_SEQUENCE_SINGLE_RE.sub(decode_match, value)
+ except MesonUnicodeDecodeError as err:
+ raise MesonException(f"Failed to parse escape sequence: '{err.match}' in string:\n {match_text}")
+ elif tid == 'multiline_string':
+ tid = 'string'
+ value = match_text[3:-3]
+ lines = match_text.split('\n')
+ if len(lines) > 1:
+ lineno += len(lines) - 1
+ line_start = mo.end() - len(lines[-1])
+ elif tid == 'number':
+ value = int(match_text, base=0)
+ elif tid == 'eol_cont':
+ lineno += 1
+ line_start = loc
+ break
+ elif tid == 'eol':
+ lineno += 1
+ line_start = loc
+ if par_count > 0 or bracket_count > 0 or curl_count > 0:
+ break
+ elif tid == 'id':
+ if match_text in self.keywords:
+ tid = match_text
+ else:
+ if match_text in self.future_keywords:
+ mlog.warning(f"Identifier '{match_text}' will become a reserved keyword in a future release. Please rename it.",
+ location=types.SimpleNamespace(filename=filename, lineno=lineno))
+ value = match_text
+ yield Token(tid, filename, curline_start, curline, col, bytespan, value)
+ break
+ if not matched:
+ raise ParseException('lexer', self.getline(line_start), lineno, col)
+
+class BaseNode:
+ def __init__(self, lineno: int, colno: int, filename: str, end_lineno: T.Optional[int] = None, end_colno: T.Optional[int] = None):
+ self.lineno = lineno # type: int
+ self.colno = colno # type: int
+ self.filename = filename # type: str
+ self.end_lineno = end_lineno if end_lineno is not None else self.lineno
+ self.end_colno = end_colno if end_colno is not None else self.colno
+
+ # Attributes for the visitors
+ self.level = 0 # type: int
+ self.ast_id = '' # type: str
+ self.condition_level = 0 # type: int
+
+ def accept(self, visitor: 'AstVisitor') -> None:
+ fname = 'visit_{}'.format(type(self).__name__)
+ if hasattr(visitor, fname):
+ func = getattr(visitor, fname)
+ if callable(func):
+ func(self)
+
+class ElementaryNode(T.Generic[TV_TokenTypes], BaseNode):
+ def __init__(self, token: Token[TV_TokenTypes]):
+ super().__init__(token.lineno, token.colno, token.filename)
+ self.value = token.value # type: TV_TokenTypes
+ self.bytespan = token.bytespan # type: T.Tuple[int, int]
+
+class BooleanNode(ElementaryNode[bool]):
+ def __init__(self, token: Token[bool]):
+ super().__init__(token)
+ assert isinstance(self.value, bool)
+
+class IdNode(ElementaryNode[str]):
+ def __init__(self, token: Token[str]):
+ super().__init__(token)
+ assert isinstance(self.value, str)
+
+ def __str__(self) -> str:
+ return "Id node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno)
+
+class NumberNode(ElementaryNode[int]):
+ def __init__(self, token: Token[int]):
+ super().__init__(token)
+ assert isinstance(self.value, int)
+
+class StringNode(ElementaryNode[str]):
+ def __init__(self, token: Token[str]):
+ super().__init__(token)
+ assert isinstance(self.value, str)
+
+ def __str__(self) -> str:
+ return "String node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno)
+
+class FormatStringNode(ElementaryNode[str]):
+ def __init__(self, token: Token[str]):
+ super().__init__(token)
+ assert isinstance(self.value, str)
+
+ def __str__(self) -> str:
+ return "Format string node: '{self.value}' ({self.lineno}, {self.colno})."
+
+class ContinueNode(ElementaryNode):
+ pass
+
+class BreakNode(ElementaryNode):
+ pass
+
+class ArgumentNode(BaseNode):
+ def __init__(self, token: Token[TV_TokenTypes]):
+ super().__init__(token.lineno, token.colno, token.filename)
+ self.arguments = [] # type: T.List[BaseNode]
+ self.commas = [] # type: T.List[Token[TV_TokenTypes]]
+ self.kwargs = {} # type: T.Dict[BaseNode, BaseNode]
+ self.order_error = False
+
+ def prepend(self, statement: BaseNode) -> None:
+ if self.num_kwargs() > 0:
+ self.order_error = True
+ if not isinstance(statement, EmptyNode):
+ self.arguments = [statement] + self.arguments
+
+ def append(self, statement: BaseNode) -> None:
+ if self.num_kwargs() > 0:
+ self.order_error = True
+ if not isinstance(statement, EmptyNode):
+ self.arguments += [statement]
+
+ def set_kwarg(self, name: IdNode, value: BaseNode) -> None:
+ if name.value in [x.value for x in self.kwargs.keys() if isinstance(x, IdNode)]:
+ mlog.warning(f'Keyword argument "{name.value}" defined multiple times.', location=self)
+ mlog.warning('This will be an error in future Meson releases.')
+ self.kwargs[name] = value
+
+ def set_kwarg_no_check(self, name: BaseNode, value: BaseNode) -> None:
+ self.kwargs[name] = value
+
+ def num_args(self) -> int:
+ return len(self.arguments)
+
+ def num_kwargs(self) -> int:
+ return len(self.kwargs)
+
+ def incorrect_order(self) -> bool:
+ return self.order_error
+
+ def __len__(self) -> int:
+ return self.num_args() # Fixme
+
+class ArrayNode(BaseNode):
+ def __init__(self, args: ArgumentNode, lineno: int, colno: int, end_lineno: int, end_colno: int):
+ super().__init__(lineno, colno, args.filename, end_lineno=end_lineno, end_colno=end_colno)
+ self.args = args # type: ArgumentNode
+
+class DictNode(BaseNode):
+ def __init__(self, args: ArgumentNode, lineno: int, colno: int, end_lineno: int, end_colno: int):
+ super().__init__(lineno, colno, args.filename, end_lineno=end_lineno, end_colno=end_colno)
+ self.args = args
+
+class EmptyNode(BaseNode):
+ def __init__(self, lineno: int, colno: int, filename: str):
+ super().__init__(lineno, colno, filename)
+ self.value = None
+
+class OrNode(BaseNode):
+ def __init__(self, left: BaseNode, right: BaseNode):
+ super().__init__(left.lineno, left.colno, left.filename)
+ self.left = left # type: BaseNode
+ self.right = right # type: BaseNode
+
+class AndNode(BaseNode):
+ def __init__(self, left: BaseNode, right: BaseNode):
+ super().__init__(left.lineno, left.colno, left.filename)
+ self.left = left # type: BaseNode
+ self.right = right # type: BaseNode
+
+class ComparisonNode(BaseNode):
+ def __init__(self, ctype: str, left: BaseNode, right: BaseNode):
+ super().__init__(left.lineno, left.colno, left.filename)
+ self.left = left # type: BaseNode
+ self.right = right # type: BaseNode
+ self.ctype = ctype # type: str
+
+class ArithmeticNode(BaseNode):
+ def __init__(self, operation: str, left: BaseNode, right: BaseNode):
+ super().__init__(left.lineno, left.colno, left.filename)
+ self.left = left # type: BaseNode
+ self.right = right # type: BaseNode
+ self.operation = operation # type: str
+
+class NotNode(BaseNode):
+ def __init__(self, token: Token[TV_TokenTypes], value: BaseNode):
+ super().__init__(token.lineno, token.colno, token.filename)
+ self.value = value # type: BaseNode
+
+class CodeBlockNode(BaseNode):
+ def __init__(self, token: Token[TV_TokenTypes]):
+ super().__init__(token.lineno, token.colno, token.filename)
+ self.lines = [] # type: T.List[BaseNode]
+
+class IndexNode(BaseNode):
+ def __init__(self, iobject: BaseNode, index: BaseNode):
+ super().__init__(iobject.lineno, iobject.colno, iobject.filename)
+ self.iobject = iobject # type: BaseNode
+ self.index = index # type: BaseNode
+
+class MethodNode(BaseNode):
+ def __init__(self, filename: str, lineno: int, colno: int, source_object: BaseNode, name: str, args: ArgumentNode):
+ super().__init__(lineno, colno, filename)
+ self.source_object = source_object # type: BaseNode
+ self.name = name # type: str
+        assert isinstance(self.name, str)
+ self.args = args # type: ArgumentNode
+
+class FunctionNode(BaseNode):
+ def __init__(self, filename: str, lineno: int, colno: int, end_lineno: int, end_colno: int, func_name: str, args: ArgumentNode):
+ super().__init__(lineno, colno, filename, end_lineno=end_lineno, end_colno=end_colno)
+ self.func_name = func_name # type: str
+        assert isinstance(func_name, str)
+ self.args = args # type: ArgumentNode
+
+class AssignmentNode(BaseNode):
+ def __init__(self, filename: str, lineno: int, colno: int, var_name: str, value: BaseNode):
+ super().__init__(lineno, colno, filename)
+ self.var_name = var_name # type: str
+        assert isinstance(var_name, str)
+ self.value = value # type: BaseNode
+
+class PlusAssignmentNode(BaseNode):
+ def __init__(self, filename: str, lineno: int, colno: int, var_name: str, value: BaseNode):
+ super().__init__(lineno, colno, filename)
+ self.var_name = var_name # type: str
+        assert isinstance(var_name, str)
+ self.value = value # type: BaseNode
+
+class ForeachClauseNode(BaseNode):
+ def __init__(self, token: Token, varnames: T.List[str], items: BaseNode, block: CodeBlockNode):
+ super().__init__(token.lineno, token.colno, token.filename)
+ self.varnames = varnames # type: T.List[str]
+ self.items = items # type: BaseNode
+ self.block = block # type: CodeBlockNode
+
+class IfNode(BaseNode):
+ def __init__(self, linenode: BaseNode, condition: BaseNode, block: CodeBlockNode):
+ super().__init__(linenode.lineno, linenode.colno, linenode.filename)
+ self.condition = condition # type: BaseNode
+ self.block = block # type: CodeBlockNode
+
+class IfClauseNode(BaseNode):
+ def __init__(self, linenode: BaseNode):
+ super().__init__(linenode.lineno, linenode.colno, linenode.filename)
+ self.ifs = [] # type: T.List[IfNode]
+ self.elseblock = None # type: T.Union[EmptyNode, CodeBlockNode]
+
+class UMinusNode(BaseNode):
+ def __init__(self, current_location: Token, value: BaseNode):
+ super().__init__(current_location.lineno, current_location.colno, current_location.filename)
+ self.value = value # type: BaseNode
+
+class TernaryNode(BaseNode):
+ def __init__(self, condition: BaseNode, trueblock: BaseNode, falseblock: BaseNode):
+ super().__init__(condition.lineno, condition.colno, condition.filename)
+ self.condition = condition # type: BaseNode
+ self.trueblock = trueblock # type: BaseNode
+ self.falseblock = falseblock # type: BaseNode
+
+comparison_map = {'equal': '==',
+ 'nequal': '!=',
+ 'lt': '<',
+ 'le': '<=',
+ 'gt': '>',
+ 'ge': '>=',
+ 'in': 'in',
+ 'notin': 'not in',
+ }
+
+# Recursive descent parser for Meson's definition language.
+# Very basic apart from the fact that we have many precedence
+# levels so there are not enough words to describe them all.
+# Hence the numbering:
+#
+# 1 assignment
+# 2 or
+# 3 and
+# 4 comparison
+# 5 arithmetic
+# 6 negation
+# 7 funcall, method call
+# 8 parentheses
+# 9 plain token
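+#
+# For example (illustrative), 'x = a or b == 1 + 2 * 3' parses as
+# 'x = (a or (b == (1 + (2 * 3))))' under this numbering.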
+
+class Parser:
+ def __init__(self, code: str, filename: str):
+ self.lexer = Lexer(code)
+ self.stream = self.lexer.lex(filename)
+ self.current = Token('eof', '', 0, 0, 0, (0, 0), None) # type: Token
+ self.getsym()
+ self.in_ternary = False
+
+ def getsym(self) -> None:
+ try:
+ self.current = next(self.stream)
+ except StopIteration:
+ self.current = Token('eof', '', self.current.line_start, self.current.lineno, self.current.colno + self.current.bytespan[1] - self.current.bytespan[0], (0, 0), None)
+
+ def getline(self) -> str:
+ return self.lexer.getline(self.current.line_start)
+
+ def accept(self, s: str) -> bool:
+ if self.current.tid == s:
+ self.getsym()
+ return True
+ return False
+
+ def accept_any(self, tids: T.Sequence[str]) -> str:
+ tid = self.current.tid
+ if tid in tids:
+ self.getsym()
+ return tid
+ return ''
+
+ def expect(self, s: str) -> bool:
+ if self.accept(s):
+ return True
+ raise ParseException(f'Expecting {s} got {self.current.tid}.', self.getline(), self.current.lineno, self.current.colno)
+
+ def block_expect(self, s: str, block_start: Token) -> bool:
+ if self.accept(s):
+ return True
+ raise BlockParseException(f'Expecting {s} got {self.current.tid}.', self.getline(), self.current.lineno, self.current.colno, self.lexer.getline(block_start.line_start), block_start.lineno, block_start.colno)
+
+ def parse(self) -> CodeBlockNode:
+ block = self.codeblock()
+ self.expect('eof')
+ return block
+
+ def statement(self) -> BaseNode:
+ return self.e1()
+
+ def e1(self) -> BaseNode:
+ left = self.e2()
+ if self.accept('plusassign'):
+ value = self.e1()
+ if not isinstance(left, IdNode):
+ raise ParseException('Plusassignment target must be an id.', self.getline(), left.lineno, left.colno)
+ assert isinstance(left.value, str)
+ return PlusAssignmentNode(left.filename, left.lineno, left.colno, left.value, value)
+ elif self.accept('assign'):
+ value = self.e1()
+ if not isinstance(left, IdNode):
+ raise ParseException('Assignment target must be an id.',
+ self.getline(), left.lineno, left.colno)
+ assert isinstance(left.value, str)
+ return AssignmentNode(left.filename, left.lineno, left.colno, left.value, value)
+ elif self.accept('questionmark'):
+ if self.in_ternary:
+ raise ParseException('Nested ternary operators are not allowed.',
+ self.getline(), left.lineno, left.colno)
+ self.in_ternary = True
+ trueblock = self.e1()
+ self.expect('colon')
+ falseblock = self.e1()
+ self.in_ternary = False
+ return TernaryNode(left, trueblock, falseblock)
+ return left
+
+ def e2(self) -> BaseNode:
+ left = self.e3()
+ while self.accept('or'):
+ if isinstance(left, EmptyNode):
+ raise ParseException('Invalid or clause.',
+ self.getline(), left.lineno, left.colno)
+ left = OrNode(left, self.e3())
+ return left
+
+ def e3(self) -> BaseNode:
+ left = self.e4()
+ while self.accept('and'):
+ if isinstance(left, EmptyNode):
+ raise ParseException('Invalid and clause.',
+ self.getline(), left.lineno, left.colno)
+ left = AndNode(left, self.e4())
+ return left
+
+ def e4(self) -> BaseNode:
+ left = self.e5()
+ for nodename, operator_type in comparison_map.items():
+ if self.accept(nodename):
+ return ComparisonNode(operator_type, left, self.e5())
+ if self.accept('not') and self.accept('in'):
+ return ComparisonNode('notin', left, self.e5())
+ return left
+
+ def e5(self) -> BaseNode:
+ return self.e5addsub()
+
+ def e5addsub(self) -> BaseNode:
+ op_map = {
+ 'plus': 'add',
+ 'dash': 'sub',
+ }
+ left = self.e5muldiv()
+ while True:
+ op = self.accept_any(tuple(op_map.keys()))
+ if op:
+ left = ArithmeticNode(op_map[op], left, self.e5muldiv())
+ else:
+ break
+ return left
+
+ def e5muldiv(self) -> BaseNode:
+ op_map = {
+ 'percent': 'mod',
+ 'star': 'mul',
+ 'fslash': 'div',
+ }
+ left = self.e6()
+ while True:
+ op = self.accept_any(tuple(op_map.keys()))
+ if op:
+ left = ArithmeticNode(op_map[op], left, self.e6())
+ else:
+ break
+ return left
+
+ def e6(self) -> BaseNode:
+ if self.accept('not'):
+ return NotNode(self.current, self.e7())
+ if self.accept('dash'):
+ return UMinusNode(self.current, self.e7())
+ return self.e7()
+
+ def e7(self) -> BaseNode:
+ left = self.e8()
+ block_start = self.current
+ if self.accept('lparen'):
+ args = self.args()
+ self.block_expect('rparen', block_start)
+ if not isinstance(left, IdNode):
+ raise ParseException('Function call must be applied to plain id',
+ self.getline(), left.lineno, left.colno)
+ assert isinstance(left.value, str)
+ left = FunctionNode(left.filename, left.lineno, left.colno, self.current.lineno, self.current.colno, left.value, args)
+ go_again = True
+ while go_again:
+ go_again = False
+ if self.accept('dot'):
+ go_again = True
+ left = self.method_call(left)
+ if self.accept('lbracket'):
+ go_again = True
+ left = self.index_call(left)
+ return left
+
+ def e8(self) -> BaseNode:
+ block_start = self.current
+ if self.accept('lparen'):
+ e = self.statement()
+ self.block_expect('rparen', block_start)
+ return e
+ elif self.accept('lbracket'):
+ args = self.args()
+ self.block_expect('rbracket', block_start)
+ return ArrayNode(args, block_start.lineno, block_start.colno, self.current.lineno, self.current.colno)
+ elif self.accept('lcurl'):
+ key_values = self.key_values()
+ self.block_expect('rcurl', block_start)
+ return DictNode(key_values, block_start.lineno, block_start.colno, self.current.lineno, self.current.colno)
+ else:
+ return self.e9()
+
+ def e9(self) -> BaseNode:
+ t = self.current
+ if self.accept('true'):
+ t.value = True
+ return BooleanNode(t)
+ if self.accept('false'):
+ t.value = False
+ return BooleanNode(t)
+ if self.accept('id'):
+ return IdNode(t)
+ if self.accept('number'):
+ return NumberNode(t)
+ if self.accept('string'):
+ return StringNode(t)
+ if self.accept('fstring'):
+ return FormatStringNode(t)
+ return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
+
+ def key_values(self) -> ArgumentNode:
+ s = self.statement() # type: BaseNode
+ a = ArgumentNode(self.current)
+
+ while not isinstance(s, EmptyNode):
+ if self.accept('colon'):
+ a.set_kwarg_no_check(s, self.statement())
+ potential = self.current
+ if not self.accept('comma'):
+ return a
+ a.commas.append(potential)
+ else:
+ raise ParseException('Only key:value pairs are valid in dict construction.',
+ self.getline(), s.lineno, s.colno)
+ s = self.statement()
+ return a
+
+ def args(self) -> ArgumentNode:
+ s = self.statement() # type: BaseNode
+ a = ArgumentNode(self.current)
+
+ while not isinstance(s, EmptyNode):
+ potential = self.current
+ if self.accept('comma'):
+ a.commas.append(potential)
+ a.append(s)
+ elif self.accept('colon'):
+ if not isinstance(s, IdNode):
+ raise ParseException('Dictionary key must be a plain identifier.',
+ self.getline(), s.lineno, s.colno)
+ a.set_kwarg(s, self.statement())
+ potential = self.current
+ if not self.accept('comma'):
+ return a
+ a.commas.append(potential)
+ else:
+ a.append(s)
+ return a
+ s = self.statement()
+ return a
+
+ def method_call(self, source_object: BaseNode) -> MethodNode:
+ methodname = self.e9()
+        if not isinstance(methodname, IdNode):
+ raise ParseException('Method name must be plain id',
+ self.getline(), self.current.lineno, self.current.colno)
+ assert isinstance(methodname.value, str)
+ self.expect('lparen')
+ args = self.args()
+ self.expect('rparen')
+ method = MethodNode(methodname.filename, methodname.lineno, methodname.colno, source_object, methodname.value, args)
+ if self.accept('dot'):
+ return self.method_call(method)
+ return method
+
+ def index_call(self, source_object: BaseNode) -> IndexNode:
+ index_statement = self.statement()
+ self.expect('rbracket')
+ return IndexNode(source_object, index_statement)
+
+ def foreachblock(self) -> ForeachClauseNode:
+ t = self.current
+ self.expect('id')
+ assert isinstance(t.value, str)
+ varname = t
+ varnames = [t.value] # type: T.List[str]
+
+ if self.accept('comma'):
+ t = self.current
+ self.expect('id')
+ assert isinstance(t.value, str)
+ varnames.append(t.value)
+
+ self.expect('colon')
+ items = self.statement()
+ block = self.codeblock()
+ return ForeachClauseNode(varname, varnames, items, block)
+
+ def ifblock(self) -> IfClauseNode:
+ condition = self.statement()
+ clause = IfClauseNode(condition)
+ self.expect('eol')
+ block = self.codeblock()
+ clause.ifs.append(IfNode(clause, condition, block))
+ self.elseifblock(clause)
+ clause.elseblock = self.elseblock()
+ return clause
+
+ def elseifblock(self, clause: IfClauseNode) -> None:
+ while self.accept('elif'):
+ s = self.statement()
+ self.expect('eol')
+ b = self.codeblock()
+ clause.ifs.append(IfNode(s, s, b))
+
+ def elseblock(self) -> T.Union[CodeBlockNode, EmptyNode]:
+ if self.accept('else'):
+ self.expect('eol')
+ return self.codeblock()
+ return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
+
+ def line(self) -> BaseNode:
+ block_start = self.current
+ if self.current == 'eol':
+ return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
+ if self.accept('if'):
+ ifblock = self.ifblock()
+ self.block_expect('endif', block_start)
+ return ifblock
+ if self.accept('foreach'):
+ forblock = self.foreachblock()
+ self.block_expect('endforeach', block_start)
+ return forblock
+ if self.accept('continue'):
+ return ContinueNode(self.current)
+ if self.accept('break'):
+ return BreakNode(self.current)
+ return self.statement()
+
+ def codeblock(self) -> CodeBlockNode:
+ block = CodeBlockNode(self.current)
+ cond = True
+ while cond:
+ curline = self.line()
+ if not isinstance(curline, EmptyNode):
+ block.lines.append(curline)
+ cond = self.accept('eol')
+ return block
diff --git a/meson/mesonbuild/msetup.py b/meson/mesonbuild/msetup.py
new file mode 100644
index 000000000..9ed298160
--- /dev/null
+++ b/meson/mesonbuild/msetup.py
@@ -0,0 +1,282 @@
+# Copyright 2016-2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import typing as T
+import time
+import sys, stat
+import datetime
+import os.path
+import platform
+import cProfile as profile
+import argparse
+import tempfile
+import shutil
+import glob
+
+from . import environment, interpreter, mesonlib
+from . import build
+from . import mlog, coredata
+from . import mintro
+from .mconf import make_lower_case
+from .mesonlib import MesonException
+
+git_ignore_file = '''# This file is autogenerated by Meson. If you change or delete it, it won't be recreated.
+*
+'''
+
+hg_ignore_file = '''# This file is autogenerated by Meson. If you change or delete it, it won't be recreated.
+syntax: glob
+**/*
+'''
+
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+ coredata.register_builtin_arguments(parser)
+ parser.add_argument('--native-file',
+ default=[],
+ action='append',
+ help='File containing overrides for native compilation environment.')
+ parser.add_argument('--cross-file',
+ default=[],
+ action='append',
+ help='File describing cross compilation environment.')
+ parser.add_argument('-v', '--version', action='version',
+ version=coredata.version)
+ parser.add_argument('--profile-self', action='store_true', dest='profile',
+ help=argparse.SUPPRESS)
+ parser.add_argument('--fatal-meson-warnings', action='store_true', dest='fatal_warnings',
+ help='Make all Meson warnings fatal')
+ parser.add_argument('--reconfigure', action='store_true',
+ help='Set options and reconfigure the project. Useful when new ' +
+ 'options have been added to the project and the default value ' +
+ 'is not working.')
+ parser.add_argument('--wipe', action='store_true',
+                        help='Wipe the build directory and reconfigure using the previous command line options. ' +
+                        'Useful when the build directory got corrupted, or when rebuilding with a ' +
+ 'newer version of meson.')
+ parser.add_argument('builddir', nargs='?', default=None)
+ parser.add_argument('sourcedir', nargs='?', default=None)
+
+class MesonApp:
+ def __init__(self, options: argparse.Namespace) -> None:
+ (self.source_dir, self.build_dir) = self.validate_dirs(options.builddir,
+ options.sourcedir,
+ options.reconfigure,
+ options.wipe)
+ if options.wipe:
+ # Make a copy of the cmd line file to make sure we can always
+ # restore that file if anything bad happens. For example if
+ # configuration fails we need to be able to wipe again.
+ restore = []
+ with tempfile.TemporaryDirectory() as d:
+ for filename in [coredata.get_cmd_line_file(self.build_dir)] + glob.glob(os.path.join(self.build_dir, environment.Environment.private_dir, '*.ini')):
+ try:
+ restore.append((shutil.copy(filename, d), filename))
+ except FileNotFoundError:
+ raise MesonException(
+ 'Cannot find cmd_line.txt. This is probably because this '
+ 'build directory was configured with a meson version < 0.49.0.')
+
+ coredata.read_cmd_line_file(self.build_dir, options)
+
+ try:
+                # Don't delete the whole tree, just all of the files and
+                # folders in the tree. Otherwise calling wipe from the builddir
+                # will cause a crash.
+ for l in os.listdir(self.build_dir):
+ l = os.path.join(self.build_dir, l)
+ if os.path.isdir(l) and not os.path.islink(l):
+ mesonlib.windows_proof_rmtree(l)
+ else:
+ mesonlib.windows_proof_rm(l)
+ finally:
+ self.add_vcs_ignore_files(self.build_dir)
+ for b, f in restore:
+ os.makedirs(os.path.dirname(f), exist_ok=True)
+ shutil.move(b, f)
+
+ self.options = options
+
+ def has_build_file(self, dirname: str) -> bool:
+ fname = os.path.join(dirname, environment.build_filename)
+ return os.path.exists(fname)
+
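+    # Note: the two positional directories may be given in either order;
+    # whichever resolved directory contains a meson.build becomes the source
+    # directory, and the other becomes the build directory.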
+ def validate_core_dirs(self, dir1: str, dir2: str) -> T.Tuple[str, str]:
+ if dir1 is None:
+ if dir2 is None:
+ if not os.path.exists('meson.build') and os.path.exists('../meson.build'):
+ dir2 = '..'
+ else:
+ raise MesonException('Must specify at least one directory name.')
+ dir1 = os.getcwd()
+ if dir2 is None:
+ dir2 = os.getcwd()
+ ndir1 = os.path.abspath(os.path.realpath(dir1))
+ ndir2 = os.path.abspath(os.path.realpath(dir2))
+ if not os.path.exists(ndir1):
+ os.makedirs(ndir1)
+ if not os.path.exists(ndir2):
+ os.makedirs(ndir2)
+ if not stat.S_ISDIR(os.stat(ndir1).st_mode):
+ raise MesonException(f'{dir1} is not a directory')
+ if not stat.S_ISDIR(os.stat(ndir2).st_mode):
+ raise MesonException(f'{dir2} is not a directory')
+ if os.path.samefile(ndir1, ndir2):
+            # Fall back to textual comparison if either stat entry is undefined
+ has_undefined = any((s.st_ino == 0 and s.st_dev == 0) for s in (os.stat(ndir1), os.stat(ndir2)))
+ if not has_undefined or ndir1 == ndir2:
+ raise MesonException('Source and build directories must not be the same. Create a pristine build directory.')
+ if self.has_build_file(ndir1):
+ if self.has_build_file(ndir2):
+ raise MesonException(f'Both directories contain a build file {environment.build_filename}.')
+ return ndir1, ndir2
+ if self.has_build_file(ndir2):
+ return ndir2, ndir1
+ raise MesonException(f'Neither directory contains a build file {environment.build_filename}.')
+
+ def add_vcs_ignore_files(self, build_dir: str) -> None:
+ if os.listdir(build_dir):
+ return
+ with open(os.path.join(build_dir, '.gitignore'), 'w', encoding='utf-8') as ofile:
+ ofile.write(git_ignore_file)
+ with open(os.path.join(build_dir, '.hgignore'), 'w', encoding='utf-8') as ofile:
+ ofile.write(hg_ignore_file)
+
+ def validate_dirs(self, dir1: str, dir2: str, reconfigure: bool, wipe: bool) -> T.Tuple[str, str]:
+ (src_dir, build_dir) = self.validate_core_dirs(dir1, dir2)
+ self.add_vcs_ignore_files(build_dir)
+        coredata_file = os.path.join(build_dir, 'meson-private/coredata.dat')
+        if os.path.exists(coredata_file):
+ if not reconfigure and not wipe:
+ print('Directory already configured.\n'
+ '\nJust run your build command (e.g. ninja) and Meson will regenerate as necessary.\n'
+ 'If ninja fails, run "ninja reconfigure" or "meson --reconfigure"\n'
+ 'to force Meson to regenerate.\n'
+ '\nIf build failures persist, run "meson setup --wipe" to rebuild from scratch\n'
+ 'using the same options as passed when configuring the build.'
+ '\nTo change option values, run "meson configure" instead.')
+ raise SystemExit
+ else:
+ has_cmd_line_file = os.path.exists(coredata.get_cmd_line_file(build_dir))
+ if (wipe and not has_cmd_line_file) or (not wipe and reconfigure):
+ raise SystemExit(f'Directory does not contain a valid build tree:\n{build_dir}')
+ return src_dir, build_dir
+
+ def generate(self) -> None:
+ env = environment.Environment(self.source_dir, self.build_dir, self.options)
+ mlog.initialize(env.get_log_dir(), self.options.fatal_warnings)
+ if self.options.profile:
+ mlog.set_timestamp_start(time.monotonic())
+ if env.coredata.options[mesonlib.OptionKey('backend')].value == 'xcode':
+ mlog.warning('xcode backend is currently unmaintained, patches welcome')
+ with mesonlib.BuildDirLock(self.build_dir):
+ self._generate(env)
+
+ def _generate(self, env: environment.Environment) -> None:
+ mlog.debug('Build started at', datetime.datetime.now().isoformat())
+ mlog.debug('Main binary:', sys.executable)
+ mlog.debug('Build Options:', coredata.get_cmd_line_options(self.build_dir, self.options))
+ mlog.debug('Python system:', platform.system())
+ mlog.log(mlog.bold('The Meson build system'))
+ mlog.log('Version:', coredata.version)
+ mlog.log('Source dir:', mlog.bold(self.source_dir))
+ mlog.log('Build dir:', mlog.bold(self.build_dir))
+ if env.is_cross_build():
+ mlog.log('Build type:', mlog.bold('cross build'))
+ else:
+ mlog.log('Build type:', mlog.bold('native build'))
+ b = build.Build(env)
+
+ intr = interpreter.Interpreter(b)
+ if env.is_cross_build():
+ logger_fun = mlog.log
+ else:
+ logger_fun = mlog.debug
+ build_machine = intr.builtin['build_machine']
+ host_machine = intr.builtin['host_machine']
+ target_machine = intr.builtin['target_machine']
+ assert isinstance(build_machine, interpreter.MachineHolder)
+ assert isinstance(host_machine, interpreter.MachineHolder)
+ assert isinstance(target_machine, interpreter.MachineHolder)
+ logger_fun('Build machine cpu family:', mlog.bold(build_machine.cpu_family_method([], {})))
+ logger_fun('Build machine cpu:', mlog.bold(build_machine.cpu_method([], {})))
+ mlog.log('Host machine cpu family:', mlog.bold(host_machine.cpu_family_method([], {})))
+ mlog.log('Host machine cpu:', mlog.bold(host_machine.cpu_method([], {})))
+ logger_fun('Target machine cpu family:', mlog.bold(target_machine.cpu_family_method([], {})))
+ logger_fun('Target machine cpu:', mlog.bold(target_machine.cpu_method([], {})))
+ try:
+ if self.options.profile:
+ fname = os.path.join(self.build_dir, 'meson-private', 'profile-interpreter.log')
+ profile.runctx('intr.run()', globals(), locals(), filename=fname)
+ else:
+ intr.run()
+ except Exception as e:
+ mintro.write_meson_info_file(b, [e])
+ raise
+ # Print all default option values that don't match the current value
+ for def_opt_name, def_opt_value, cur_opt_value in intr.get_non_matching_default_options():
+ mlog.log('Option', mlog.bold(def_opt_name), 'is:',
+ mlog.bold('{}'.format(make_lower_case(cur_opt_value.printable_value()))),
+ '[default: {}]'.format(make_lower_case(def_opt_value)))
+ try:
+ dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
+ # We would like to write coredata as late as possible since we use the existence of
+ # this file to check if we generated the build file successfully. Since coredata
+ # includes settings, the build files must depend on it and appear newer. However, due
+ # to various kernel caches, we cannot guarantee that any time in Python is exactly in
+ # sync with the time that gets applied to any files. Thus, we dump this file as late as
+ # possible, but before build files, and if any error occurs, delete it.
+ cdf = env.dump_coredata()
+ if self.options.profile:
+ fname = f'profile-{intr.backend.name}-backend.log'
+ fname = os.path.join(self.build_dir, 'meson-private', fname)
+ profile.runctx('intr.backend.generate()', globals(), locals(), filename=fname)
+ else:
+ intr.backend.generate()
+ b.devenv.append(intr.backend.get_devenv())
+ build.save(b, dumpfile)
+ if env.first_invocation:
+                # Use the paths resolved by coredata because they could have
+                # been read from a pipe and written into a private file.
+ self.options.cross_file = env.coredata.cross_files
+ self.options.native_file = env.coredata.config_files
+ coredata.write_cmd_line_file(self.build_dir, self.options)
+ else:
+ coredata.update_cmd_line_file(self.build_dir, self.options)
+
+ # Generate an IDE introspection file with the same syntax as the already existing API
+ if self.options.profile:
+ fname = os.path.join(self.build_dir, 'meson-private', 'profile-introspector.log')
+ profile.runctx('mintro.generate_introspection_file(b, intr.backend)', globals(), locals(), filename=fname)
+ else:
+ mintro.generate_introspection_file(b, intr.backend)
+ mintro.write_meson_info_file(b, [], True)
+
+ # Post-conf scripts must be run after writing coredata or else introspection fails.
+ intr.backend.run_postconf_scripts()
+ except Exception as e:
+ mintro.write_meson_info_file(b, [e])
+ if 'cdf' in locals():
+ old_cdf = cdf + '.prev'
+ if os.path.exists(old_cdf):
+ os.replace(old_cdf, cdf)
+ else:
+ os.unlink(cdf)
+ raise
+
+def run(options: argparse.Namespace) -> int:
+ coredata.parse_cmd_line_options(options)
+ app = MesonApp(options)
+ app.generate()
+ return 0
diff --git a/meson/mesonbuild/msubprojects.py b/meson/mesonbuild/msubprojects.py
new file mode 100755
index 000000000..269f0feba
--- /dev/null
+++ b/meson/mesonbuild/msubprojects.py
@@ -0,0 +1,561 @@
+import os, subprocess
+import argparse
+import asyncio
+import threading
+import copy
+import shutil
+from concurrent.futures.thread import ThreadPoolExecutor
+from pathlib import Path
+import typing as T
+
+from . import mlog
+from .mesonlib import quiet_git, GitException, Popen_safe, MesonException, windows_proof_rmtree
+from .wrap.wrap import PackageDefinition, Resolver, WrapException, ALL_TYPES
+from .wrap import wraptool
+
+ALL_TYPES_STRING = ', '.join(ALL_TYPES)
+
+class Logger:
+ def __init__(self, total_tasks: int) -> None:
+ self.lock = threading.Lock()
+ self.total_tasks = total_tasks
+ self.completed_tasks = 0
+ self.running_tasks = set()
+ self.should_erase_line = ''
+
+ def flush(self) -> None:
+ if self.should_erase_line:
+ print(self.should_erase_line, end='\r')
+ self.should_erase_line = ''
+
+ def print_progress(self) -> None:
+ line = f'Progress: {self.completed_tasks} / {self.total_tasks}'
+ max_len = shutil.get_terminal_size().columns - len(line)
+ running = ', '.join(self.running_tasks)
+ if len(running) + 3 > max_len:
+ running = running[:max_len - 6] + '...'
+ line = line + f' ({running})'
+ print(self.should_erase_line, line, sep='', end='\r')
+ self.should_erase_line = '\x1b[K'
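+        # '\x1b[K' is the ANSI "erase to end of line" sequence; it is printed
+        # before the next progress line to clear the remains of this one.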
+
+ def start(self, wrap_name: str) -> None:
+ with self.lock:
+ self.running_tasks.add(wrap_name)
+ self.print_progress()
+
+ def done(self, wrap_name: str, log_queue: T.List[T.Tuple[mlog.TV_LoggableList, T.Any]]) -> None:
+ with self.lock:
+ self.flush()
+ for args, kwargs in log_queue:
+ mlog.log(*args, **kwargs)
+ self.running_tasks.remove(wrap_name)
+ self.completed_tasks += 1
+ self.print_progress()
+
+
+class Runner:
+ def __init__(self, logger: Logger, r: Resolver, wrap: PackageDefinition, repo_dir: str, options: argparse.Namespace) -> None:
+ # FIXME: Do a copy because Resolver.resolve() is stateful method that
+ # cannot be called from multiple threads.
+ self.wrap_resolver = copy.copy(r)
+ self.wrap = wrap
+ self.repo_dir = repo_dir
+ self.options = options
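+        # subprojects_func is set via argparse set_defaults() to an unbound
+        # Runner method (e.g. Runner.update); __get__ binds it to this
+        # instance so it can be called later as a plain callable.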
+ self.run_method = options.subprojects_func.__get__(self)
+ self.log_queue = []
+ self.logger = logger
+
+ def log(self, *args, **kwargs):
+ self.log_queue.append((args, kwargs))
+
+ def run(self):
+ self.logger.start(self.wrap.name)
+ try:
+ result = self.run_method()
+ except MesonException as e:
+ self.log(mlog.red('Error:'), str(e))
+ result = False
+ self.logger.done(self.wrap.name, self.log_queue)
+ return result
+
+ def update_wrapdb_file(self):
+ try:
+ patch_url = self.wrap.get('patch_url')
+ branch, revision = wraptool.parse_patch_url(patch_url)
+ except WrapException:
+ return
+ new_branch, new_revision = wraptool.get_latest_version(self.wrap.name)
+ if new_branch != branch or new_revision != revision:
+ wraptool.update_wrap_file(self.wrap.filename, self.wrap.name, new_branch, new_revision)
+ self.log(' -> New wrap file downloaded.')
+
+ def update_file(self):
+ self.update_wrapdb_file()
+ if not os.path.isdir(self.repo_dir):
+            # The subproject is not needed, or it is a tarball extracted into the
+            # 'libfoo-1.0' directory and the version has been bumped so that the
+            # new directory is 'libfoo-2.0'. In that case forcing a meson
+            # reconfigure will download and use the new tarball.
+ self.log(' -> Not used.')
+ return True
+ elif self.options.reset:
+ # Delete existing directory and redownload. It is possible that nothing
+ # changed but we have no way to know. Hopefully tarballs are still
+ # cached.
+ windows_proof_rmtree(self.repo_dir)
+ try:
+ self.wrap_resolver.resolve(self.wrap.name, 'meson')
+ self.log(' -> New version extracted')
+ return True
+ except WrapException as e:
+ self.log(' ->', mlog.red(str(e)))
+ return False
+ else:
+            # The subproject has not changed, or the new source and/or patch
+            # tarballs should be extracted in the same directory as the
+            # previous version.
+            self.log(' -> Subproject has not changed, or the new source/patch needs to be extracted in the same location.')
+ self.log(' Pass --reset option to delete directory and redownload.')
+ return False
+
+ def git_output(self, cmd):
+ return quiet_git(cmd, self.repo_dir, check=True)[1]
+
+ def git_verbose(self, cmd):
+ self.log(self.git_output(cmd))
+
+ def git_stash(self):
+        # This git command returns 1 (failure) when there is something to stash.
+        # We don't want to stash when there is nothing to stash because that
+        # would print a spurious "No local changes to save".
+ if not quiet_git(['diff', '--quiet', 'HEAD'], self.repo_dir)[0]:
+ # Don't pipe stdout here because we want the user to see their changes have
+ # been saved.
+ self.git_verbose(['stash'])
+
+ def git_show(self):
+ commit_message = self.git_output(['show', '--quiet', '--pretty=format:%h%n%d%n%s%n[%an]'])
+ parts = [s.strip() for s in commit_message.split('\n')]
+ self.log(' ->', mlog.yellow(parts[0]), mlog.red(parts[1]), parts[2], mlog.blue(parts[3]))
+
+ def git_rebase(self, revision):
+ try:
+ self.git_output(['-c', 'rebase.autoStash=true', 'rebase', 'FETCH_HEAD'])
+ except GitException as e:
+ self.log(' -> Could not rebase', mlog.bold(self.repo_dir), 'onto', mlog.bold(revision))
+ self.log(mlog.red(e.output))
+ self.log(mlog.red(str(e)))
+ return False
+ return True
+
+ def git_reset(self, revision):
+ try:
+            # Stash local changes; commits can always be recovered from the
+            # reflog, so no data is lost by mistake.
+ self.git_stash()
+ self.git_output(['reset', '--hard', 'FETCH_HEAD'])
+ except GitException as e:
+            self.log(' -> Could not reset', mlog.bold(self.repo_dir), 'to', mlog.bold(revision))
+ self.log(mlog.red(e.output))
+ self.log(mlog.red(str(e)))
+ return False
+ return True
+
+ def git_checkout(self, revision, create=False):
+ cmd = ['checkout', '--ignore-other-worktrees', revision, '--']
+ if create:
+            cmd.insert(1, '-b')
+ try:
+            # Stash local changes; commits can always be recovered from the
+            # reflog, so no data is lost by mistake.
+ self.git_stash()
+ self.git_output(cmd)
+ except GitException as e:
+ self.log(' -> Could not checkout', mlog.bold(revision), 'in', mlog.bold(self.repo_dir))
+ self.log(mlog.red(e.output))
+ self.log(mlog.red(str(e)))
+ return False
+ return True
+
+ def git_checkout_and_reset(self, revision):
+ # revision could be a branch that already exists but is outdated, so we still
+ # have to reset after the checkout.
+ success = self.git_checkout(revision)
+ if success:
+ success = self.git_reset(revision)
+ return success
+
+ def git_checkout_and_rebase(self, revision):
+ # revision could be a branch that already exists but is outdated, so we still
+ # have to rebase after the checkout.
+ success = self.git_checkout(revision)
+ if success:
+ success = self.git_rebase(revision)
+ return success
+
+ def update_git(self):
+ if not os.path.isdir(self.repo_dir):
+ self.log(' -> Not used.')
+ return True
+ if not os.path.exists(os.path.join(self.repo_dir, '.git')):
+ if self.options.reset:
+ # Delete existing directory and redownload
+ windows_proof_rmtree(self.repo_dir)
+ try:
+ self.wrap_resolver.resolve(self.wrap.name, 'meson')
+ self.update_git_done()
+ return True
+ except WrapException as e:
+ self.log(' ->', mlog.red(str(e)))
+ return False
+ else:
+ self.log(' -> Not a git repository.')
+ self.log('Pass --reset option to delete directory and redownload.')
+ return False
+ revision = self.wrap.values.get('revision')
+ url = self.wrap.values.get('url')
+ push_url = self.wrap.values.get('push-url')
+ if not revision or not url:
+ # It could be a detached git submodule for example.
+ self.log(' -> No revision or URL specified.')
+ return True
+ try:
+ origin_url = self.git_output(['remote', 'get-url', 'origin']).strip()
+ except GitException as e:
+ self.log(' -> Failed to determine current origin URL in', mlog.bold(self.repo_dir))
+ self.log(mlog.red(e.output))
+ self.log(mlog.red(str(e)))
+ return False
+ if self.options.reset:
+ try:
+ self.git_output(['remote', 'set-url', 'origin', url])
+ if push_url:
+ self.git_output(['remote', 'set-url', '--push', 'origin', push_url])
+ except GitException as e:
+ self.log(' -> Failed to reset origin URL in', mlog.bold(self.repo_dir))
+ self.log(mlog.red(e.output))
+ self.log(mlog.red(str(e)))
+ return False
+ elif url != origin_url:
+ self.log(f' -> URL changed from {origin_url!r} to {url!r}')
+ return False
+ try:
+ # Same as `git branch --show-current` but compatible with older git version
+ branch = self.git_output(['rev-parse', '--abbrev-ref', 'HEAD']).strip()
+ branch = branch if branch != 'HEAD' else ''
+ except GitException as e:
+ self.log(' -> Failed to determine current branch in', mlog.bold(self.repo_dir))
+ self.log(mlog.red(e.output))
+ self.log(mlog.red(str(e)))
+ return False
+ try:
+            # Fetch only the revision we need; this avoids fetching useless branches.
+            # revision can be either a branch, tag or commit id. In all cases we want
+            # FETCH_HEAD to be set to the desired commit and "git checkout <revision>"
+            # to either switch to an existing/new branch, or detach to a tag/commit.
+            # It is more complicated than it first appears, see the discussion here:
+            # https://github.com/mesonbuild/meson/pull/7723#discussion_r488816189.
+ heads_refmap = '+refs/heads/*:refs/remotes/origin/*'
+ tags_refmap = '+refs/tags/*:refs/tags/*'
+ self.git_output(['fetch', '--refmap', heads_refmap, '--refmap', tags_refmap, 'origin', revision])
+ except GitException as e:
+ self.log(' -> Could not fetch revision', mlog.bold(revision), 'in', mlog.bold(self.repo_dir))
+ self.log(mlog.red(e.output))
+ self.log(mlog.red(str(e)))
+ return False
+
+ if branch == '':
+ # We are currently in detached mode
+ if self.options.reset:
+ success = self.git_checkout_and_reset(revision)
+ else:
+ success = self.git_checkout_and_rebase(revision)
+ elif branch == revision:
+ # We are in the same branch. A reset could still be needed in the case
+ # a force push happened on remote repository.
+ if self.options.reset:
+ success = self.git_reset(revision)
+ else:
+ success = self.git_rebase(revision)
+ else:
+ # We are in another branch, either the user created their own branch and
+ # we should rebase it, or revision changed in the wrap file and we need
+ # to checkout the new branch.
+ if self.options.reset:
+ success = self.git_checkout_and_reset(revision)
+ else:
+ success = self.git_rebase(revision)
+ if success:
+ self.update_git_done()
+ return success
+
+ def update_git_done(self):
+ self.git_output(['submodule', 'update', '--checkout', '--recursive'])
+ self.git_show()
+
+ def update_hg(self):
+ if not os.path.isdir(self.repo_dir):
+ self.log(' -> Not used.')
+ return True
+ revno = self.wrap.get('revision')
+ if revno.lower() == 'tip':
+            # Failure to pull is not a fatal error,
+            # because otherwise you couldn't develop without
+            # a working net connection.
+ subprocess.call(['hg', 'pull'], cwd=self.repo_dir)
+ else:
+ if subprocess.call(['hg', 'checkout', revno], cwd=self.repo_dir) != 0:
+ subprocess.check_call(['hg', 'pull'], cwd=self.repo_dir)
+ subprocess.check_call(['hg', 'checkout', revno], cwd=self.repo_dir)
+ return True
+
+ def update_svn(self):
+ if not os.path.isdir(self.repo_dir):
+ self.log(' -> Not used.')
+ return True
+ revno = self.wrap.get('revision')
+ p, out, _ = Popen_safe(['svn', 'info', '--show-item', 'revision', self.repo_dir])
+        current_revno = out.strip()
+ if current_revno == revno:
+ return True
+ if revno.lower() == 'head':
+            # Failure to update is not a fatal error,
+            # because otherwise you couldn't develop without
+            # a working net connection.
+ subprocess.call(['svn', 'update'], cwd=self.repo_dir)
+ else:
+ subprocess.check_call(['svn', 'update', '-r', revno], cwd=self.repo_dir)
+ return True
+
+ def update(self):
+ self.log(f'Updating {self.wrap.name}...')
+ if self.wrap.type == 'file':
+ return self.update_file()
+ elif self.wrap.type == 'git':
+ return self.update_git()
+ elif self.wrap.type == 'hg':
+ return self.update_hg()
+ elif self.wrap.type == 'svn':
+ return self.update_svn()
+ elif self.wrap.type is None:
+ self.log(' -> Cannot update subproject with no wrap file')
+ else:
+ self.log(' -> Cannot update', self.wrap.type, 'subproject')
+ return True
+
+ def checkout(self):
+ if self.wrap.type != 'git' or not os.path.isdir(self.repo_dir):
+ return True
+ branch_name = self.options.branch_name if self.options.branch_name else self.wrap.get('revision')
+ if not branch_name:
+ # It could be a detached git submodule for example.
+ return True
+ self.log(f'Checkout {branch_name} in {self.wrap.name}...')
+ if self.git_checkout(branch_name, create=self.options.b):
+ self.git_show()
+ return True
+ return False
+
+ def download(self):
+ self.log(f'Download {self.wrap.name}...')
+ if os.path.isdir(self.repo_dir):
+ self.log(' -> Already downloaded')
+ return True
+ try:
+ self.wrap_resolver.resolve(self.wrap.name, 'meson')
+ self.log(' -> done')
+ except WrapException as e:
+ self.log(' ->', mlog.red(str(e)))
+ return False
+ return True
+
+ def foreach(self):
+ self.log(f'Executing command in {self.repo_dir}')
+ if not os.path.isdir(self.repo_dir):
+ self.log(' -> Not downloaded yet')
+ return True
+ cmd = [self.options.command] + self.options.args
+ p, out, _ = Popen_safe(cmd, stderr=subprocess.STDOUT, cwd=self.repo_dir)
+ if p.returncode != 0:
+ err_message = "Command '{}' returned non-zero exit status {}.".format(" ".join(cmd), p.returncode)
+ self.log(' -> ', mlog.red(err_message))
+ self.log(out, end='')
+ return False
+
+ self.log(out, end='')
+ return True
+
+ def purge(self) -> bool:
+ # if subproject is not wrap-based, then don't remove it
+ if not self.wrap.type:
+ return True
+
+ if self.wrap.redirected:
+ redirect_file = Path(self.wrap.original_filename).resolve()
+ if self.options.confirm:
+ redirect_file.unlink()
+ mlog.log(f'Deleting {redirect_file}')
+
+ if self.wrap.type == 'redirect':
+ redirect_file = Path(self.wrap.filename).resolve()
+ if self.options.confirm:
+ redirect_file.unlink()
+ self.log(f'Deleting {redirect_file}')
+
+ if self.options.include_cache:
+ packagecache = Path(self.wrap_resolver.cachedir).resolve()
+ try:
+ subproject_cache_file = packagecache / self.wrap.get("source_filename")
+ if subproject_cache_file.is_file():
+ if self.options.confirm:
+ subproject_cache_file.unlink()
+ self.log(f'Deleting {subproject_cache_file}')
+ except WrapException:
+ pass
+
+ try:
+ subproject_patch_file = packagecache / self.wrap.get("patch_filename")
+ if subproject_patch_file.is_file():
+ if self.options.confirm:
+ subproject_patch_file.unlink()
+ self.log(f'Deleting {subproject_patch_file}')
+ except WrapException:
+ pass
+
+ # Don't log that we will remove an empty directory. Since purge is
+ # parallelized, another thread could have deleted it already.
+ try:
+ if not any(packagecache.iterdir()):
+ windows_proof_rmtree(str(packagecache))
+ except FileNotFoundError:
+ pass
+
+ subproject_source_dir = Path(self.repo_dir).resolve()
+
+ # Don't follow symlink. This is covered by the next if statement, but why
+ # not be doubly sure.
+ if subproject_source_dir.is_symlink():
+ if self.options.confirm:
+ subproject_source_dir.unlink()
+ self.log(f'Deleting {subproject_source_dir}')
+ return True
+ if not subproject_source_dir.is_dir():
+ return True
+
+ try:
+ if self.options.confirm:
+ windows_proof_rmtree(str(subproject_source_dir))
+ self.log(f'Deleting {subproject_source_dir}')
+ except OSError as e:
+ mlog.error(f'Unable to remove: {subproject_source_dir}: {e}')
+ return False
+
+ return True
+
+ @staticmethod
+ def post_purge(options):
+ if not options.confirm:
+ mlog.log('')
+ mlog.log('Nothing has been deleted, run again with --confirm to apply.')
+
+def add_common_arguments(p):
+ p.add_argument('--sourcedir', default='.',
+ help='Path to source directory')
+ p.add_argument('--types', default='',
+ help=f'Comma-separated list of subproject types. Supported types are: {ALL_TYPES_STRING} (default: all)')
+ p.add_argument('--num-processes', default=None, type=int,
+ help='How many parallel processes to use (Since 0.59.0).')
+
+def add_subprojects_argument(p):
+ p.add_argument('subprojects', nargs='*',
+ help='List of subprojects (default: all)')
+
+def add_arguments(parser):
+ subparsers = parser.add_subparsers(title='Commands', dest='command')
+ subparsers.required = True
+
+ p = subparsers.add_parser('update', help='Update all subprojects from wrap files')
+ p.add_argument('--rebase', default=True, action='store_true',
+ help='Rebase your branch on top of wrap\'s revision. ' + \
+ 'Deprecated, it is now the default behaviour. (git only)')
+ p.add_argument('--reset', default=False, action='store_true',
+ help='Checkout wrap\'s revision and hard reset to that commit. (git only)')
+ add_common_arguments(p)
+ add_subprojects_argument(p)
+ p.set_defaults(subprojects_func=Runner.update)
+
+ p = subparsers.add_parser('checkout', help='Checkout a branch (git only)')
+ p.add_argument('-b', default=False, action='store_true',
+ help='Create a new branch')
+ p.add_argument('branch_name', nargs='?',
+ help='Name of the branch to checkout or create (default: revision set in wrap file)')
+ add_common_arguments(p)
+ add_subprojects_argument(p)
+ p.set_defaults(subprojects_func=Runner.checkout)
+
+ p = subparsers.add_parser('download', help='Ensure subprojects are fetched, even if not in use. ' +
+ 'Already downloaded subprojects are not modified. ' +
+ 'This can be used to pre-fetch all subprojects and avoid downloads during configure.')
+ add_common_arguments(p)
+ add_subprojects_argument(p)
+ p.set_defaults(subprojects_func=Runner.download)
+
+ p = subparsers.add_parser('foreach', help='Execute a command in each subproject directory.')
+ p.add_argument('command', metavar='command ...',
+ help='Command to execute in each subproject directory')
+ p.add_argument('args', nargs=argparse.REMAINDER,
+ help=argparse.SUPPRESS)
+ add_common_arguments(p)
+ p.set_defaults(subprojects=[])
+ p.set_defaults(subprojects_func=Runner.foreach)
+
+ p = subparsers.add_parser('purge', help='Remove all wrap-based subproject artifacts')
+ add_common_arguments(p)
+ add_subprojects_argument(p)
+ p.add_argument('--include-cache', action='store_true', default=False, help='Remove the package cache as well')
+ p.add_argument('--confirm', action='store_true', default=False, help='Confirm the removal of subproject artifacts')
+ p.set_defaults(subprojects_func=Runner.purge)
+ p.set_defaults(post_func=Runner.post_purge)
+
+def run(options):
+ src_dir = os.path.relpath(os.path.realpath(options.sourcedir))
+ if not os.path.isfile(os.path.join(src_dir, 'meson.build')):
+ mlog.error('Directory', mlog.bold(src_dir), 'does not seem to be a Meson source directory.')
+ return 1
+ subprojects_dir = os.path.join(src_dir, 'subprojects')
+ if not os.path.isdir(subprojects_dir):
+ mlog.log('Directory', mlog.bold(src_dir), 'does not seem to have subprojects.')
+ return 0
+ r = Resolver(src_dir, 'subprojects')
+ if options.subprojects:
+ wraps = [wrap for name, wrap in r.wraps.items() if name in options.subprojects]
+ else:
+ wraps = r.wraps.values()
+ types = [t.strip() for t in options.types.split(',')] if options.types else []
+ for t in types:
+ if t not in ALL_TYPES:
+ raise MesonException(f'Unknown subproject type {t!r}, supported types are: {ALL_TYPES_STRING}')
+ tasks = []
+ task_names = []
+ loop = asyncio.get_event_loop()
+ executor = ThreadPoolExecutor(options.num_processes)
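+    # Runner.run() is a blocking call, so each runner is dispatched to the
+    # thread pool via run_in_executor(); asyncio.gather() below collects the
+    # boolean results in the order the tasks were submitted.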
+ if types:
+ wraps = [wrap for wrap in wraps if wrap.type in types]
+ logger = Logger(len(wraps))
+ for wrap in wraps:
+ dirname = Path(subprojects_dir, wrap.directory).as_posix()
+ runner = Runner(logger, r, wrap, dirname, options)
+ task = loop.run_in_executor(executor, runner.run)
+ tasks.append(task)
+ task_names.append(wrap.name)
+ results = loop.run_until_complete(asyncio.gather(*tasks))
+ logger.flush()
+ post_func = getattr(options, 'post_func', None)
+ if post_func:
+ post_func(options)
+ failures = [name for name, success in zip(task_names, results) if not success]
+ if failures:
+ m = 'Please check logs above as command failed in some subprojects which could have been left in conflict state: '
+ m += ', '.join(failures)
+ mlog.warning(m)
+ return len(failures)
diff --git a/meson/mesonbuild/mtest.py b/meson/mesonbuild/mtest.py
new file mode 100644
index 000000000..a44f7f291
--- /dev/null
+++ b/meson/mesonbuild/mtest.py
@@ -0,0 +1,2011 @@
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A tool to run tests in many different ways.
+
+from pathlib import Path
+from collections import deque
+from copy import deepcopy
+import argparse
+import asyncio
+import datetime
+import enum
+import json
+import multiprocessing
+import os
+import pickle
+import platform
+import random
+import re
+import signal
+import subprocess
+import shlex
+import sys
+import textwrap
+import time
+import typing as T
+import unicodedata
+import xml.etree.ElementTree as et
+
+from . import build
+from . import environment
+from . import mlog
+from .coredata import major_versions_differ, MesonVersionMismatchException
+from .coredata import version as coredata_version
+from .mesonlib import (MesonException, OrderedSet, RealPathAction,
+ get_wine_shortpath, join_args, split_args)
+from .mintro import get_infodir, load_info_file
+from .programs import ExternalProgram
+from .backend.backends import TestProtocol, TestSerialisation
+
+# GNU autotools interprets a return code of 77 from tests it executes to
+# mean that the test should be skipped.
+GNU_SKIP_RETURNCODE = 77
+
+# GNU autotools interprets a return code of 99 from tests it executes to
+# mean that the test failed even before testing what it is supposed to test.
+GNU_ERROR_RETURNCODE = 99
+
+# Exit if 3 Ctrl-C's are received within one second
+MAX_CTRLC = 3
+
+def is_windows() -> bool:
+ platname = platform.system().lower()
+ return platname == 'windows'
+
+def is_cygwin() -> bool:
+ return sys.platform == 'cygwin'
+
+UNIWIDTH_MAPPING = {'F': 2, 'H': 1, 'W': 2, 'Na': 1, 'N': 1, 'A': 1}
+def uniwidth(s: str) -> int:
+ result = 0
+ for c in s:
+ w = unicodedata.east_asian_width(c)
+ result += UNIWIDTH_MAPPING[w]
+ return result
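+
+# For example, uniwidth('abc') == 3 while uniwidth('日本') == 4: East Asian
+# "wide" characters occupy two terminal columns.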
+
+def determine_worker_count() -> int:
+ varname = 'MESON_TESTTHREADS'
+ if varname in os.environ:
+ try:
+ num_workers = int(os.environ[varname])
+ except ValueError:
+ print(f'Invalid value in {varname}, using 1 thread.')
+ num_workers = 1
+ else:
+ try:
+ # Fails in some weird environments such as Debian
+ # reproducible build.
+ num_workers = multiprocessing.cpu_count()
+ except Exception:
+ num_workers = 1
+ return num_workers
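+
+# The worker count can be overridden from the environment, e.g.
+# `MESON_TESTTHREADS=4 meson test`.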
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+ parser.add_argument('--repeat', default=1, dest='repeat', type=int,
+ help='Number of times to run the tests.')
+ parser.add_argument('--no-rebuild', default=False, action='store_true',
+ help='Do not rebuild before running tests.')
+ parser.add_argument('--gdb', default=False, dest='gdb', action='store_true',
+ help='Run test under gdb.')
+ parser.add_argument('--gdb-path', default='gdb', dest='gdb_path',
+ help='Path to the gdb binary (default: gdb).')
+ parser.add_argument('--list', default=False, dest='list', action='store_true',
+ help='List available tests.')
+ parser.add_argument('--wrapper', default=None, dest='wrapper', type=split_args,
+ help='wrapper to run tests with (e.g. Valgrind)')
+ parser.add_argument('-C', dest='wd', action=RealPathAction,
+ # https://github.com/python/typeshed/issues/3107
+ # https://github.com/python/mypy/issues/7177
+ type=os.path.abspath, # type: ignore
+ help='directory to cd into before running')
+ parser.add_argument('--suite', default=[], dest='include_suites', action='append', metavar='SUITE',
+ help='Only run tests belonging to the given suite.')
+ parser.add_argument('--no-suite', default=[], dest='exclude_suites', action='append', metavar='SUITE',
+ help='Do not run tests belonging to the given suite.')
+ parser.add_argument('--no-stdsplit', default=True, dest='split', action='store_false',
+ help='Do not split stderr and stdout in test logs.')
+ parser.add_argument('--print-errorlogs', default=False, action='store_true',
+ help="Whether to print failing tests' logs.")
+ parser.add_argument('--benchmark', default=False, action='store_true',
+ help="Run benchmarks instead of tests.")
+ parser.add_argument('--logbase', default='testlog',
+ help="Base name for log file.")
+ parser.add_argument('--num-processes', default=determine_worker_count(), type=int,
+ help='How many parallel processes to use.')
+ parser.add_argument('-v', '--verbose', default=False, action='store_true',
+ help='Do not redirect stdout and stderr')
+ parser.add_argument('-q', '--quiet', default=False, action='store_true',
+ help='Produce less output to the terminal.')
+ parser.add_argument('-t', '--timeout-multiplier', type=float, default=None,
+ help='Define a multiplier for test timeout, for example '
+ ' when running tests in particular conditions they might take'
+ ' more time to execute. (<= 0 to disable timeout)')
+ parser.add_argument('--setup', default=None, dest='setup',
+ help='Which test setup to use.')
+ parser.add_argument('--test-args', default=[], type=split_args,
+ help='Arguments to pass to the specified test(s) or all tests')
+ parser.add_argument('args', nargs='*',
+ help='Optional list of test names to run. "testname" to run all tests with that name, '
+ '"subprojname:testname" to specifically run "testname" from "subprojname", '
+ '"subprojname:" to run all tests defined by "subprojname".')
+
+
+def print_safe(s: str) -> None:
+    end = '' if s.endswith('\n') else '\n'
+ try:
+ print(s, end=end)
+ except UnicodeEncodeError:
+ s = s.encode('ascii', errors='backslashreplace').decode('ascii')
+ print(s, end=end)
+
+def join_lines(a: str, b: str) -> str:
+ if not a:
+ return b
+ if not b:
+ return a
+ return a + '\n' + b
+
+def dashes(s: str, dash: str, cols: int) -> str:
+ if not s:
+ return dash * cols
+ s = ' ' + s + ' '
+ width = uniwidth(s)
+ first = (cols - width) // 2
+ s = dash * first + s
+ return s + dash * (cols - first - width)
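+
+# For example, dashes('output', '-', 14) == '--- output ---': the text is
+# centered and padded to the requested width with the dash character.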
+
+def returncode_to_status(retcode: int) -> str:
+    # Note: We can't use `os.WIFSIGNALED(result.returncode)` and the related
+    # functions here because the status returned by subprocess is munged. It
+    # returns a negative value if the process was killed by a signal rather than
+    # the raw status returned by `wait()`. Also, if a shell sits between Meson
+    # and the actual unit test, that shell is likely to convert a termination due
+    # to a signal into an exit status of 128 plus the signal number.
+ if retcode < 0:
+ signum = -retcode
+ try:
+ signame = signal.Signals(signum).name
+ except ValueError:
+ signame = 'SIGinvalid'
+ return f'killed by signal {signum} {signame}'
+
+ if retcode <= 128:
+ return f'exit status {retcode}'
+
+ signum = retcode - 128
+ try:
+ signame = signal.Signals(signum).name
+ except ValueError:
+ signame = 'SIGinvalid'
+ return f'(exit status {retcode} or signal {signum} {signame})'
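+
+# For example, on Linux returncode_to_status(-11) == 'killed by signal 11 SIGSEGV',
+# while a crash behind a shell typically surfaces as
+# returncode_to_status(139) == '(exit status 139 or signal 11 SIGSEGV)'.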
+
+# TODO for Windows
+sh_quote: T.Callable[[str], str] = lambda x: x
+if not is_windows():
+ sh_quote = shlex.quote
+
+def env_tuple_to_str(env: T.Iterable[T.Tuple[str, str]]) -> str:
+ return ''.join(["{}={} ".format(k, sh_quote(v)) for k, v in env])
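+
+# For example, on POSIX (where sh_quote is shlex.quote):
+#   env_tuple_to_str([('A', '1'), ('B', 'x y')]) == "A=1 B='x y' "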
+
+
+class TestException(MesonException):
+ pass
+
+
+@enum.unique
+class ConsoleUser(enum.Enum):
+
+ # the logger can use the console
+ LOGGER = 0
+
+ # the console is used by gdb
+ GDB = 1
+
+ # the console is used to write stdout/stderr
+ STDOUT = 2
+
+
+@enum.unique
+class TestResult(enum.Enum):
+
+ PENDING = 'PENDING'
+ RUNNING = 'RUNNING'
+ OK = 'OK'
+ TIMEOUT = 'TIMEOUT'
+ INTERRUPT = 'INTERRUPT'
+ SKIP = 'SKIP'
+ FAIL = 'FAIL'
+ EXPECTEDFAIL = 'EXPECTEDFAIL'
+ UNEXPECTEDPASS = 'UNEXPECTEDPASS'
+ ERROR = 'ERROR'
+
+ @staticmethod
+ def maxlen() -> int:
+ return 14 # len(UNEXPECTEDPASS)
+
+ def is_ok(self) -> bool:
+ return self in {TestResult.OK, TestResult.EXPECTEDFAIL}
+
+ def is_bad(self) -> bool:
+ return self in {TestResult.FAIL, TestResult.TIMEOUT, TestResult.INTERRUPT,
+ TestResult.UNEXPECTEDPASS, TestResult.ERROR}
+
+ def is_finished(self) -> bool:
+ return self not in {TestResult.PENDING, TestResult.RUNNING}
+
+ def was_killed(self) -> bool:
+ return self in (TestResult.TIMEOUT, TestResult.INTERRUPT)
+
+ def colorize(self, s: str) -> mlog.AnsiDecorator:
+ if self.is_bad():
+ decorator = mlog.red
+ elif self in (TestResult.SKIP, TestResult.EXPECTEDFAIL):
+ decorator = mlog.yellow
+ elif self.is_finished():
+ decorator = mlog.green
+ else:
+ decorator = mlog.blue
+ return decorator(s)
+
+ def get_text(self, colorize: bool) -> str:
+ result_str = '{res:{reslen}}'.format(res=self.value, reslen=self.maxlen())
+ return self.colorize(result_str).get_text(colorize)
+
+ def get_command_marker(self) -> str:
+ return str(self.colorize('>>> '))
+
+
+TYPE_TAPResult = T.Union['TAPParser.Test', 'TAPParser.Error', 'TAPParser.Version', 'TAPParser.Plan', 'TAPParser.Bailout']
+
+class TAPParser:
+ class Plan(T.NamedTuple):
+ num_tests: int
+ late: bool
+ skipped: bool
+ explanation: T.Optional[str]
+
+ class Bailout(T.NamedTuple):
+ message: str
+
+ class Test(T.NamedTuple):
+ number: int
+ name: str
+ result: TestResult
+ explanation: T.Optional[str]
+
+ def __str__(self) -> str:
+ return f'{self.number} {self.name}'.strip()
+
+ class Error(T.NamedTuple):
+ message: str
+
+ class Version(T.NamedTuple):
+ version: int
+
+ _MAIN = 1
+ _AFTER_TEST = 2
+ _YAML = 3
+
+ _RE_BAILOUT = re.compile(r'Bail out!\s*(.*)')
+ _RE_DIRECTIVE = re.compile(r'(?:\s*\#\s*([Ss][Kk][Ii][Pp]\S*|[Tt][Oo][Dd][Oo])\b\s*(.*))?')
+ _RE_PLAN = re.compile(r'1\.\.([0-9]+)' + _RE_DIRECTIVE.pattern)
+ _RE_TEST = re.compile(r'((?:not )?ok)\s*(?:([0-9]+)\s*)?([^#]*)' + _RE_DIRECTIVE.pattern)
+ _RE_VERSION = re.compile(r'TAP version ([0-9]+)')
+ _RE_YAML_START = re.compile(r'(\s+)---.*')
+ _RE_YAML_END = re.compile(r'\s+\.\.\.\s*')
+
+ found_late_test = False
+ bailed_out = False
+ plan: T.Optional[Plan] = None
+ lineno = 0
+ num_tests = 0
+ yaml_lineno: T.Optional[int] = None
+ yaml_indent = ''
+ state = _MAIN
+ version = 12
+
+ def parse_test(self, ok: bool, num: int, name: str, directive: T.Optional[str], explanation: T.Optional[str]) -> \
+ T.Generator[T.Union['TAPParser.Test', 'TAPParser.Error'], None, None]:
+ name = name.strip()
+ explanation = explanation.strip() if explanation else None
+ if directive is not None:
+ directive = directive.upper()
+ if directive.startswith('SKIP'):
+ if ok:
+ yield self.Test(num, name, TestResult.SKIP, explanation)
+ return
+ elif directive == 'TODO':
+ yield self.Test(num, name, TestResult.UNEXPECTEDPASS if ok else TestResult.EXPECTEDFAIL, explanation)
+ return
+ else:
+ yield self.Error(f'invalid directive "{directive}"')
+
+ yield self.Test(num, name, TestResult.OK if ok else TestResult.FAIL, explanation)
+
+ async def parse_async(self, lines: T.AsyncIterator[str]) -> T.AsyncIterator[TYPE_TAPResult]:
+ async for line in lines:
+ for event in self.parse_line(line):
+ yield event
+ for event in self.parse_line(None):
+ yield event
+
+ def parse(self, io: T.Iterator[str]) -> T.Iterator[TYPE_TAPResult]:
+ for line in io:
+ yield from self.parse_line(line)
+ yield from self.parse_line(None)
+
+ def parse_line(self, line: T.Optional[str]) -> T.Iterator[TYPE_TAPResult]:
+ if line is not None:
+ self.lineno += 1
+ line = line.rstrip()
+
+ # YAML blocks are only accepted after a test
+ if self.state == self._AFTER_TEST:
+ if self.version >= 13:
+ m = self._RE_YAML_START.match(line)
+ if m:
+ self.state = self._YAML
+ self.yaml_lineno = self.lineno
+ self.yaml_indent = m.group(1)
+ return
+ self.state = self._MAIN
+
+ elif self.state == self._YAML:
+ if self._RE_YAML_END.match(line):
+ self.state = self._MAIN
+ return
+ if line.startswith(self.yaml_indent):
+ return
+ yield self.Error(f'YAML block not terminated (started on line {self.yaml_lineno})')
+ self.state = self._MAIN
+
+ assert self.state == self._MAIN
+ if line.startswith('#'):
+ return
+
+ m = self._RE_TEST.match(line)
+ if m:
+ if self.plan and self.plan.late and not self.found_late_test:
+ yield self.Error('unexpected test after late plan')
+ self.found_late_test = True
+ self.num_tests += 1
+ num = self.num_tests if m.group(2) is None else int(m.group(2))
+ if num != self.num_tests:
+ yield self.Error('out of order test numbers')
+ yield from self.parse_test(m.group(1) == 'ok', num,
+ m.group(3), m.group(4), m.group(5))
+ self.state = self._AFTER_TEST
+ return
+
+ m = self._RE_PLAN.match(line)
+ if m:
+ if self.plan:
+ yield self.Error('more than one plan found')
+ else:
+ num_tests = int(m.group(1))
+ skipped = (num_tests == 0)
+ if m.group(2):
+ if m.group(2).upper().startswith('SKIP'):
+ if num_tests > 0:
+ yield self.Error('invalid SKIP directive for plan')
+ skipped = True
+ else:
+ yield self.Error('invalid directive for plan')
+ self.plan = self.Plan(num_tests=num_tests, late=(self.num_tests > 0),
+ skipped=skipped, explanation=m.group(3))
+ yield self.plan
+ return
+
+ m = self._RE_BAILOUT.match(line)
+ if m:
+ yield self.Bailout(m.group(1))
+ self.bailed_out = True
+ return
+
+ m = self._RE_VERSION.match(line)
+ if m:
+ # The TAP version is only accepted as the first line
+ if self.lineno != 1:
+ yield self.Error('version number must be on the first line')
+ return
+ self.version = int(m.group(1))
+ if self.version < 13:
+ yield self.Error('version number should be at least 13')
+ else:
+ yield self.Version(version=self.version)
+ return
+
+ if not line:
+ return
+
+            yield self.Error(f'unexpected input at line {self.lineno}')
+ else:
+ # end of file
+ if self.state == self._YAML:
+ yield self.Error(f'YAML block not terminated (started on line {self.yaml_lineno})')
+
+ if not self.bailed_out and self.plan and self.num_tests != self.plan.num_tests:
+ if self.num_tests < self.plan.num_tests:
+ yield self.Error(f'Too few tests run (expected {self.plan.num_tests}, got {self.num_tests})')
+ else:
+ yield self.Error(f'Too many tests run (expected {self.plan.num_tests}, got {self.num_tests})')
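+
+# A minimal usage sketch (illustrative only):
+#
+#   parser = TAPParser()
+#   lines = ['TAP version 13', '1..2', 'ok 1 foo', 'not ok 2 bar']
+#   for event in parser.parse(iter(lines)):
+#       print(event)
+#
+# emits a Version, a Plan, and two Test events (OK for "foo", FAIL for "bar").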
+
+class TestLogger:
+ def flush(self) -> None:
+ pass
+
+ def start(self, harness: 'TestHarness') -> None:
+ pass
+
+ def start_test(self, harness: 'TestHarness', test: 'TestRun') -> None:
+ pass
+
+ def log_subtest(self, harness: 'TestHarness', test: 'TestRun', s: str, res: TestResult) -> None:
+ pass
+
+ def log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+ pass
+
+ async def finish(self, harness: 'TestHarness') -> None:
+ pass
+
+ def close(self) -> None:
+ pass
+
+
+class TestFileLogger(TestLogger):
+ def __init__(self, filename: str, errors: str = 'replace') -> None:
+ self.filename = filename
+ self.file = open(filename, 'w', encoding='utf-8', errors=errors)
+
+ def close(self) -> None:
+ if self.file:
+ self.file.close()
+ self.file = None
+
+
+class ConsoleLogger(TestLogger):
+ SPINNER = "\U0001f311\U0001f312\U0001f313\U0001f314" + \
+ "\U0001f315\U0001f316\U0001f317\U0001f318"
+
+ SCISSORS = "\u2700 "
+ HLINE = "\u2015"
+ RTRI = "\u25B6 "
+
+ def __init__(self) -> None:
+ self.running_tests = OrderedSet() # type: OrderedSet['TestRun']
+ self.progress_test = None # type: T.Optional['TestRun']
+ self.progress_task = None # type: T.Optional[asyncio.Future]
+ self.max_left_width = 0 # type: int
+ self.stop = False
+ self.update = asyncio.Event()
+ self.should_erase_line = ''
+ self.test_count = 0
+ self.started_tests = 0
+ self.spinner_index = 0
+ try:
+ self.cols, _ = os.get_terminal_size(1)
+ self.is_tty = True
+ except OSError:
+ self.cols = 80
+ self.is_tty = False
+
+ self.output_start = dashes(self.SCISSORS, self.HLINE, self.cols - 2)
+ self.output_end = dashes('', self.HLINE, self.cols - 2)
+ self.sub = self.RTRI
+ try:
+ self.output_start.encode(sys.stdout.encoding or 'ascii')
+ except UnicodeEncodeError:
+ self.output_start = dashes('8<', '-', self.cols - 2)
+ self.output_end = dashes('', '-', self.cols - 2)
+ self.sub = '| '
+
+ def flush(self) -> None:
+ if self.should_erase_line:
+ print(self.should_erase_line, end='')
+ self.should_erase_line = ''
+
+ def print_progress(self, line: str) -> None:
+ print(self.should_erase_line, line, sep='', end='\r')
+ self.should_erase_line = '\x1b[K'
+
+ def request_update(self) -> None:
+ self.update.set()
+
+ def emit_progress(self, harness: 'TestHarness') -> None:
+ if self.progress_test is None:
+ self.flush()
+ return
+
+ if len(self.running_tests) == 1:
+ count = f'{self.started_tests}/{self.test_count}'
+ else:
+ count = '{}-{}/{}'.format(self.started_tests - len(self.running_tests) + 1,
+ self.started_tests, self.test_count)
+
+ left = '[{}] {} '.format(count, self.SPINNER[self.spinner_index])
+ self.spinner_index = (self.spinner_index + 1) % len(self.SPINNER)
+
+ right = '{spaces} {dur:{durlen}}'.format(
+ spaces=' ' * TestResult.maxlen(),
+ dur=int(time.time() - self.progress_test.starttime),
+ durlen=harness.duration_max_len)
+ if self.progress_test.timeout:
+ right += '/{timeout:{durlen}}'.format(
+ timeout=self.progress_test.timeout,
+ durlen=harness.duration_max_len)
+ right += 's'
+ detail = self.progress_test.detail
+ if detail:
+ right += ' ' + detail
+
+ line = harness.format(self.progress_test, colorize=True,
+ max_left_width=self.max_left_width,
+ left=left, right=right)
+ self.print_progress(line)
+
+ def start(self, harness: 'TestHarness') -> None:
+ async def report_progress() -> None:
+ loop = asyncio.get_event_loop()
+ next_update = 0.0
+ self.request_update()
+ while not self.stop:
+ await self.update.wait()
+ self.update.clear()
+
+ # We may get here simply because the progress line has been
+ # overwritten, so do not always switch. Only do so every
+ # second, or if the printed test has finished
+ if loop.time() >= next_update:
+ self.progress_test = None
+ next_update = loop.time() + 1
+ loop.call_at(next_update, self.request_update)
+
+ if (self.progress_test and
+ self.progress_test.res is not TestResult.RUNNING):
+ self.progress_test = None
+
+ if not self.progress_test:
+ if not self.running_tests:
+ continue
+ # Pick a test in round robin order
+ self.progress_test = self.running_tests.pop(last=False)
+ self.running_tests.add(self.progress_test)
+
+ self.emit_progress(harness)
+ self.flush()
+
+ self.test_count = harness.test_count
+ self.cols = max(self.cols, harness.max_left_width + 30)
+
+ if self.is_tty and not harness.need_console:
+ # Account for "[aa-bb/cc] OO " in the progress report
+ self.max_left_width = 3 * len(str(self.test_count)) + 8
+ self.progress_task = asyncio.ensure_future(report_progress())
+
+ def start_test(self, harness: 'TestHarness', test: 'TestRun') -> None:
+ if harness.options.verbose and test.cmdline:
+ self.flush()
+ print(harness.format(test, mlog.colorize_console(),
+ max_left_width=self.max_left_width,
+ right=test.res.get_text(mlog.colorize_console())))
+ print(test.res.get_command_marker() + test.cmdline)
+ if test.needs_parsing:
+ pass
+ elif not test.is_parallel:
+ print(self.output_start, flush=True)
+ else:
+ print(flush=True)
+
+ self.started_tests += 1
+ self.running_tests.add(test)
+ self.running_tests.move_to_end(test, last=False)
+ self.request_update()
+
+ def shorten_log(self, harness: 'TestHarness', result: 'TestRun') -> str:
+ if not harness.options.verbose and not harness.options.print_errorlogs:
+ return ''
+
+ log = result.get_log(mlog.colorize_console(),
+ stderr_only=result.needs_parsing)
+ if harness.options.verbose:
+ return log
+
+ lines = log.splitlines()
+ if len(lines) < 100:
+ return log
+ else:
+ return str(mlog.bold('Listing only the last 100 lines from a long log.\n')) + '\n'.join(lines[-100:])
+
+ def print_log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+ if not harness.options.verbose:
+ cmdline = result.cmdline
+ if not cmdline:
+ print(result.res.get_command_marker() + result.stdo)
+ return
+ print(result.res.get_command_marker() + cmdline)
+
+ log = self.shorten_log(harness, result)
+ if log:
+ print(self.output_start)
+ print_safe(log)
+ print(self.output_end)
+
+ def log_subtest(self, harness: 'TestHarness', test: 'TestRun', s: str, result: TestResult) -> None:
+ if harness.options.verbose or (harness.options.print_errorlogs and result.is_bad()):
+ self.flush()
+ print(harness.format(test, mlog.colorize_console(), max_left_width=self.max_left_width,
+ prefix=self.sub,
+ middle=s,
+ right=result.get_text(mlog.colorize_console())), flush=True)
+
+ self.request_update()
+
+ def log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+ self.running_tests.remove(result)
+ if result.res is TestResult.TIMEOUT and harness.options.verbose:
+ self.flush()
+            print(f'{result.name} timed out (after {result.timeout} seconds)')
+
+ if not harness.options.quiet or not result.res.is_ok():
+ self.flush()
+ if harness.options.verbose and not result.is_parallel and result.cmdline:
+ if not result.needs_parsing:
+ print(self.output_end)
+ print(harness.format(result, mlog.colorize_console(), max_left_width=self.max_left_width))
+ else:
+ print(harness.format(result, mlog.colorize_console(), max_left_width=self.max_left_width),
+ flush=True)
+            if harness.options.verbose or result.res.is_bad():
+                self.print_log(harness, result)
+                print(flush=True)
+
+ self.request_update()
+
+ async def finish(self, harness: 'TestHarness') -> None:
+ self.stop = True
+ self.request_update()
+ if self.progress_task:
+ await self.progress_task
+
+ if harness.collected_failures and \
+ (harness.options.print_errorlogs or harness.options.verbose):
+ print("\nSummary of Failures:\n")
+ for i, result in enumerate(harness.collected_failures, 1):
+ print(harness.format(result, mlog.colorize_console()))
+
+ print(harness.summary())
+
+
+class TextLogfileBuilder(TestFileLogger):
+ def start(self, harness: 'TestHarness') -> None:
+ self.file.write(f'Log of Meson test suite run on {datetime.datetime.now().isoformat()}\n\n')
+ inherit_env = env_tuple_to_str(os.environ.items())
+ self.file.write(f'Inherited environment: {inherit_env}\n\n')
+
+ def log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+ self.file.write(harness.format(result, False) + '\n')
+ cmdline = result.cmdline
+ if cmdline:
+ starttime_str = time.strftime("%H:%M:%S", time.gmtime(result.starttime))
+ self.file.write(starttime_str + ' ' + cmdline + '\n')
+ self.file.write(dashes('output', '-', 78) + '\n')
+ self.file.write(result.get_log())
+ self.file.write(dashes('', '-', 78) + '\n\n')
+
+ async def finish(self, harness: 'TestHarness') -> None:
+ if harness.collected_failures:
+ self.file.write("\nSummary of Failures:\n\n")
+ for i, result in enumerate(harness.collected_failures, 1):
+ self.file.write(harness.format(result, False) + '\n')
+ self.file.write(harness.summary())
+
+ print(f'Full log written to {self.filename}')
+
+
+class JsonLogfileBuilder(TestFileLogger):
+ def log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+ jresult = {'name': result.name,
+ 'stdout': result.stdo,
+ 'result': result.res.value,
+ 'starttime': result.starttime,
+ 'duration': result.duration,
+ 'returncode': result.returncode,
+ 'env': result.env,
+ 'command': result.cmd} # type: T.Dict[str, T.Any]
+ if result.stde:
+ jresult['stderr'] = result.stde
+ self.file.write(json.dumps(jresult) + '\n')
+
+
+class JunitBuilder(TestLogger):
+
+ """Builder for Junit test results.
+
+    Junit is impossible to stream out because it requires attributes counting the
+ total number of tests, failures, skips, and errors in the root element
+ and in each test suite. As such, we use a builder class to track each
+ test case, and calculate all metadata before writing it out.
+
+ For tests with multiple results (like from a TAP test), we record the
+ test as a suite with the project_name.test_name. This allows us to track
+ each result separately. For tests with only one result (such as exit-code
+ tests) we record each one into a suite with the name project_name. The use
+ of the project_name allows us to sort subproject tests separately from
+ the root project.
+ """
+
+ def __init__(self, filename: str) -> None:
+ self.filename = filename
+ self.root = et.Element(
+ 'testsuites', tests='0', errors='0', failures='0')
+ self.suites = {} # type: T.Dict[str, et.Element]
+
+ def log(self, harness: 'TestHarness', test: 'TestRun') -> None:
+ """Log a single test case."""
+ if test.junit is not None:
+ for suite in test.junit.findall('.//testsuite'):
+ # Assume that we don't need to merge anything here...
+ suite.attrib['name'] = '{}.{}.{}'.format(test.project, test.name, suite.attrib['name'])
+
+ # GTest can inject invalid attributes
+ for case in suite.findall('.//testcase[@result]'):
+ del case.attrib['result']
+ for case in suite.findall('.//testcase[@timestamp]'):
+ del case.attrib['timestamp']
+ self.root.append(suite)
+ return
+
+        # In this case we have a test binary with multiple results,
+        # and we want each result to be recorded separately.
+ if test.results:
+ suitename = f'{test.project}.{test.name}'
+ assert suitename not in self.suites or harness.options.repeat > 1, 'duplicate suite'
+
+ suite = self.suites[suitename] = et.Element(
+ 'testsuite',
+ name=suitename,
+ tests=str(len(test.results)),
+ errors=str(sum(1 for r in test.results if r.result in
+ {TestResult.INTERRUPT, TestResult.ERROR})),
+ failures=str(sum(1 for r in test.results if r.result in
+ {TestResult.FAIL, TestResult.UNEXPECTEDPASS, TestResult.TIMEOUT})),
+ skipped=str(sum(1 for r in test.results if r.result is TestResult.SKIP)),
+ time=str(test.duration),
+ )
+
+ for subtest in test.results:
+ # Both name and classname are required. Use the suite name as
+ # the class name, so that e.g. GitLab groups testcases correctly.
+ testcase = et.SubElement(suite, 'testcase', name=str(subtest), classname=suitename)
+ if subtest.result is TestResult.SKIP:
+ et.SubElement(testcase, 'skipped')
+ elif subtest.result is TestResult.ERROR:
+ et.SubElement(testcase, 'error')
+ elif subtest.result is TestResult.FAIL:
+ et.SubElement(testcase, 'failure')
+ elif subtest.result is TestResult.UNEXPECTEDPASS:
+ fail = et.SubElement(testcase, 'failure')
+                    fail.text = 'Test unexpectedly passed.'
+ elif subtest.result is TestResult.INTERRUPT:
+ fail = et.SubElement(testcase, 'error')
+ fail.text = 'Test was interrupted by user.'
+ elif subtest.result is TestResult.TIMEOUT:
+ fail = et.SubElement(testcase, 'error')
+ fail.text = 'Test did not finish before configured timeout.'
+ if subtest.explanation:
+ et.SubElement(testcase, 'system-out').text = subtest.explanation
+ if test.stdo:
+ out = et.SubElement(suite, 'system-out')
+ out.text = test.stdo.rstrip()
+ if test.stde:
+ err = et.SubElement(suite, 'system-err')
+ err.text = test.stde.rstrip()
+ else:
+ if test.project not in self.suites:
+ suite = self.suites[test.project] = et.Element(
+ 'testsuite', name=test.project, tests='1', errors='0',
+ failures='0', skipped='0', time=str(test.duration))
+ else:
+ suite = self.suites[test.project]
+ suite.attrib['tests'] = str(int(suite.attrib['tests']) + 1)
+
+ testcase = et.SubElement(suite, 'testcase', name=test.name,
+ classname=test.project, time=str(test.duration))
+ if test.res is TestResult.SKIP:
+ et.SubElement(testcase, 'skipped')
+ suite.attrib['skipped'] = str(int(suite.attrib['skipped']) + 1)
+ elif test.res is TestResult.ERROR:
+ et.SubElement(testcase, 'error')
+ suite.attrib['errors'] = str(int(suite.attrib['errors']) + 1)
+ elif test.res is TestResult.FAIL:
+ et.SubElement(testcase, 'failure')
+ suite.attrib['failures'] = str(int(suite.attrib['failures']) + 1)
+ if test.stdo:
+ out = et.SubElement(testcase, 'system-out')
+ out.text = test.stdo.rstrip()
+ if test.stde:
+ err = et.SubElement(testcase, 'system-err')
+ err.text = test.stde.rstrip()
+
+ async def finish(self, harness: 'TestHarness') -> None:
+ """Calculate total test counts and write out the xml result."""
+ for suite in self.suites.values():
+ self.root.append(suite)
+ # The "skipped" attribute is not allowed on the root "testsuites" element
+ for attr in ['tests', 'errors', 'failures']:
+ self.root.attrib[attr] = str(int(self.root.attrib[attr]) + int(suite.attrib[attr]))
+
+ tree = et.ElementTree(self.root)
+ with open(self.filename, 'wb') as f:
+ tree.write(f, encoding='utf-8', xml_declaration=True)
+
+
+class TestRun:
+ TEST_NUM = 0
+ PROTOCOL_TO_CLASS: T.Dict[TestProtocol, T.Type['TestRun']] = {}
+
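+ # __new__ dispatches on the test's protocol: each subclass registers
+ # itself in PROTOCOL_TO_CLASS, so constructing a TestRun transparently
+ # returns the protocol-specific subclass.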
+ def __new__(cls, test: TestSerialisation, *args: T.Any, **kwargs: T.Any) -> T.Any:
+ return super().__new__(TestRun.PROTOCOL_TO_CLASS[test.protocol])
+
+ def __init__(self, test: TestSerialisation, test_env: T.Dict[str, str],
+ name: str, timeout: T.Optional[int], is_parallel: bool):
+ self.res = TestResult.PENDING
+ self.test = test
+ self._num = None # type: T.Optional[int]
+ self.name = name
+ self.timeout = timeout
+ self.results = list() # type: T.List[TAPParser.Test]
+ self.returncode = 0
+ self.starttime = None # type: T.Optional[float]
+ self.duration = None # type: T.Optional[float]
+ self.stdo = None # type: T.Optional[str]
+ self.stde = None # type: T.Optional[str]
+ self.cmd = None # type: T.Optional[T.List[str]]
+ self.env = test_env # type: T.Dict[str, str]
+ self.should_fail = test.should_fail
+ self.project = test.project_name
+ self.junit = None # type: T.Optional[et.ElementTree]
+ self.is_parallel = is_parallel
+
+ def start(self, cmd: T.List[str]) -> None:
+ self.res = TestResult.RUNNING
+ self.starttime = time.time()
+ self.cmd = cmd
+
+ @property
+ def num(self) -> int:
+ if self._num is None:
+ TestRun.TEST_NUM += 1
+ self._num = TestRun.TEST_NUM
+ return self._num
+
+ @property
+ def detail(self) -> str:
+ if self.res is TestResult.PENDING:
+ return ''
+ if self.returncode:
+ return returncode_to_status(self.returncode)
+ if self.results:
+ # running or succeeded
+ passed = sum(x.result.is_ok() for x in self.results)
+ ran = sum(x.result is not TestResult.SKIP for x in self.results)
+ if passed == ran:
+ return f'{passed} subtests passed'
+ else:
+ return f'{passed}/{ran} subtests passed'
+ return ''
+
+ def _complete(self, returncode: int, res: TestResult,
+ stdo: T.Optional[str], stde: T.Optional[str]) -> None:
+ assert isinstance(res, TestResult)
+ if self.should_fail and res in (TestResult.OK, TestResult.FAIL):
+ res = TestResult.UNEXPECTEDPASS if res.is_ok() else TestResult.EXPECTEDFAIL
+
+ self.res = res
+ self.returncode = returncode
+ self.duration = time.time() - self.starttime
+ self.stdo = stdo
+ self.stde = stde
+
+ @property
+ def cmdline(self) -> T.Optional[str]:
+ if not self.cmd:
+ return None
+ test_only_env = set(self.env.items()) - set(os.environ.items())
+ return env_tuple_to_str(test_only_env) + \
+ ' '.join(sh_quote(x) for x in self.cmd)
+
+ def complete_skip(self, message: str) -> None:
+ self.starttime = time.time()
+ self._complete(GNU_SKIP_RETURNCODE, TestResult.SKIP, message, None)
+
+ def complete(self, returncode: int, res: TestResult,
+ stdo: T.Optional[str], stde: T.Optional[str]) -> None:
+ self._complete(returncode, res, stdo, stde)
+
+ def get_log(self, colorize: bool = False, stderr_only: bool = False) -> str:
+ stdo = '' if stderr_only else self.stdo
+ if self.stde:
+ res = ''
+ if stdo:
+ res += mlog.cyan('stdout:').get_text(colorize) + '\n'
+ res += stdo
+ if res[-1:] != '\n':
+ res += '\n'
+ res += mlog.cyan('stderr:').get_text(colorize) + '\n'
+ res += self.stde
+ else:
+ res = stdo
+ if res and res[-1:] != '\n':
+ res += '\n'
+ return res
+
+ @property
+ def needs_parsing(self) -> bool:
+ return False
+
+ async def parse(self, harness: 'TestHarness', lines: T.AsyncIterator[str]) -> T.Tuple[TestResult, str]:
+ async for l in lines:
+ pass
+ return TestResult.OK, ''
+
+
+class TestRunExitCode(TestRun):
+
+ def complete(self, returncode: int, res: TestResult,
+ stdo: T.Optional[str], stde: T.Optional[str]) -> None:
+ if res:
+ pass
+ elif returncode == GNU_SKIP_RETURNCODE:
+ res = TestResult.SKIP
+ elif returncode == GNU_ERROR_RETURNCODE:
+ res = TestResult.ERROR
+ else:
+ res = TestResult.FAIL if bool(returncode) else TestResult.OK
+ super().complete(returncode, res, stdo, stde)
+
+TestRun.PROTOCOL_TO_CLASS[TestProtocol.EXITCODE] = TestRunExitCode
+
+
+class TestRunGTest(TestRunExitCode):
+ def complete(self, returncode: int, res: TestResult,
+ stdo: T.Optional[str], stde: T.Optional[str]) -> None:
+ filename = f'{self.test.name}.xml'
+ if self.test.workdir:
+ filename = os.path.join(self.test.workdir, filename)
+
+ try:
+ self.junit = et.parse(filename)
+ except FileNotFoundError:
+ # This can happen if the test fails to run or complete for some
+ # reason, like the rpath for libgtest isn't properly set. ExitCode
+ # will handle the failure, don't generate a stacktrace.
+ pass
+
+ super().complete(returncode, res, stdo, stde)
+
+TestRun.PROTOCOL_TO_CLASS[TestProtocol.GTEST] = TestRunGTest
+
+
+class TestRunTAP(TestRun):
+ @property
+ def needs_parsing(self) -> bool:
+ return True
+
+ def complete(self, returncode: int, res: TestResult,
+ stdo: str, stde: str) -> None:
+ if returncode != 0 and not res.was_killed():
+ res = TestResult.ERROR
+ stde = stde or ''
+ stde += f'\n(test program exited with status code {returncode})'
+
+ super().complete(returncode, res, stdo, stde)
+
+ async def parse(self, harness: 'TestHarness', lines: T.AsyncIterator[str]) -> T.Tuple[TestResult, str]:
+ res = TestResult.OK
+ error = ''
+
+ async for i in TAPParser().parse_async(lines):
+ if isinstance(i, TAPParser.Bailout):
+ res = TestResult.ERROR
+ harness.log_subtest(self, i.message, res)
+ elif isinstance(i, TAPParser.Test):
+ self.results.append(i)
+ if i.result.is_bad():
+ res = TestResult.FAIL
+ harness.log_subtest(self, i.name or f'subtest {i.number}', i.result)
+ elif isinstance(i, TAPParser.Error):
+ error = '\nTAP parsing error: ' + i.message
+ res = TestResult.ERROR
+
+ if all(t.result is TestResult.SKIP for t in self.results):
+ # This includes the case where self.results is empty
+ res = TestResult.SKIP
+ return res, error
+
+TestRun.PROTOCOL_TO_CLASS[TestProtocol.TAP] = TestRunTAP
+
+
+class TestRunRust(TestRun):
+ @property
+ def needs_parsing(self) -> bool:
+ return True
+
+ async def parse(self, harness: 'TestHarness', lines: T.AsyncIterator[str]) -> T.Tuple[TestResult, str]:
+ def parse_res(n: int, name: str, result: str) -> TAPParser.Test:
+ if result == 'ok':
+ return TAPParser.Test(n, name, TestResult.OK, None)
+ elif result == 'ignored':
+ return TAPParser.Test(n, name, TestResult.SKIP, None)
+ elif result == 'FAILED':
+ return TAPParser.Test(n, name, TestResult.FAIL, None)
+ return TAPParser.Test(n, name, TestResult.ERROR,
+ f'Unsupported output from rust test: {result}')
+
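+ # libtest output lines look like "test module::name ... ok"; the
+ # third field is the "..." filler and the fourth is the verdict.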
+ n = 1
+ async for line in lines:
+ if line.startswith('test ') and not line.startswith('test result'):
+ _, name, _, result = line.rstrip().split(' ')
+ name = name.replace('::', '.')
+ t = parse_res(n, name, result)
+ self.results.append(t)
+ harness.log_subtest(self, name, t.result)
+ n += 1
+
+ if all(t.result is TestResult.SKIP for t in self.results):
+ # This includes the case where self.results is empty
+ return TestResult.SKIP, ''
+ elif any(t.result is TestResult.ERROR for t in self.results):
+ return TestResult.ERROR, ''
+ elif any(t.result is TestResult.FAIL for t in self.results):
+ return TestResult.FAIL, ''
+ return TestResult.OK, ''
+
+TestRun.PROTOCOL_TO_CLASS[TestProtocol.RUST] = TestRunRust
+
+
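+# Decode subprocess output as UTF-8, falling back to ISO-8859-1 (which
+# maps every byte to a character) so undecodable output never aborts a run.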
+def decode(stream: T.Union[None, bytes]) -> str:
+ if stream is None:
+ return ''
+ try:
+ return stream.decode('utf-8')
+ except UnicodeDecodeError:
+ return stream.decode('iso-8859-1', errors='ignore')
+
+async def read_decode(reader: asyncio.StreamReader, console_mode: ConsoleUser) -> str:
+ stdo_lines = []
+ try:
+ while not reader.at_eof():
+ line = decode(await reader.readline())
+ stdo_lines.append(line)
+ if console_mode is ConsoleUser.STDOUT:
+ print(line, end='', flush=True)
+ return ''.join(stdo_lines)
+ except asyncio.CancelledError:
+ return ''.join(stdo_lines)
+
+# Extract lines out of the StreamReader. Print them
+# along the way if requested, and at the end collect
+# them all into a future.
+async def read_decode_lines(reader: asyncio.StreamReader, q: 'asyncio.Queue[T.Optional[str]]',
+ console_mode: ConsoleUser) -> str:
+ stdo_lines = []
+ try:
+ while not reader.at_eof():
+ line = decode(await reader.readline())
+ stdo_lines.append(line)
+ if console_mode is ConsoleUser.STDOUT:
+ print(line, end='', flush=True)
+ await q.put(line)
+ return ''.join(stdo_lines)
+ except asyncio.CancelledError:
+ return ''.join(stdo_lines)
+ finally:
+ await q.put(None)
+
+def run_with_mono(fname: str) -> bool:
+ return fname.endswith('.exe') and not (is_windows() or is_cygwin())
+
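+# Sanity-check the unpickled test list; a mismatch usually means the
+# build directory was configured by an incompatible meson version.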
+def check_testdata(objs: T.List[TestSerialisation]) -> T.List[TestSerialisation]:
+ if not isinstance(objs, list):
+ raise MesonVersionMismatchException('<unknown>', coredata_version)
+ for obj in objs:
+ if not isinstance(obj, TestSerialisation):
+ raise MesonVersionMismatchException('<unknown>', coredata_version)
+ if not hasattr(obj, 'version'):
+ raise MesonVersionMismatchException('<unknown>', coredata_version)
+ if major_versions_differ(obj.version, coredata_version):
+ raise MesonVersionMismatchException(obj.version, coredata_version)
+ return objs
+
+# Custom waiting primitives for asyncio
+
+async def try_wait_one(*awaitables: T.Any, timeout: T.Optional[T.Union[int, float]]) -> None:
+ """Wait for completion of one of the given futures, ignoring timeouts."""
+ await asyncio.wait(awaitables,
+ timeout=timeout, return_when=asyncio.FIRST_COMPLETED)
+
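+# Yield lines from the queue until the None sentinel (pushed by
+# read_decode_lines when the stream ends) is received.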
+async def queue_iter(q: 'asyncio.Queue[T.Optional[str]]') -> T.AsyncIterator[str]:
+ while True:
+ item = await q.get()
+ q.task_done()
+ if item is None:
+ break
+ yield item
+
+async def complete(future: asyncio.Future) -> None:
+ """Wait for completion of the given future, ignoring cancellation."""
+ try:
+ await future
+ except asyncio.CancelledError:
+ pass
+
+async def complete_all(futures: T.Iterable[asyncio.Future],
+ timeout: T.Optional[T.Union[int, float]] = None) -> None:
+ """Wait for completion of all the given futures, ignoring cancellation.
+ If timeout is not None, raise an asyncio.TimeoutError after the given
+ time has passed. asyncio.TimeoutError is only raised if some futures
+ have not completed and none have raised exceptions, even if timeout
+ is zero."""
+
+ def check_futures(futures: T.Iterable[asyncio.Future]) -> None:
+ # Raise exceptions if needed
+ left = False
+ for f in futures:
+ if not f.done():
+ left = True
+ elif not f.cancelled():
+ f.result()
+ if left:
+ raise asyncio.TimeoutError
+
+ # Python is silly and does not have a variant of asyncio.wait with an
+ # absolute time as deadline.
+ deadline = None if timeout is None else asyncio.get_event_loop().time() + timeout
+ while futures and (timeout is None or timeout > 0):
+ done, futures = await asyncio.wait(futures, timeout=timeout,
+ return_when=asyncio.FIRST_EXCEPTION)
+ check_futures(done)
+ if deadline:
+ timeout = deadline - asyncio.get_event_loop().time()
+
+ check_futures(futures)
+
+
+class TestSubprocess:
+ def __init__(self, p: asyncio.subprocess.Process,
+ stdout: T.Optional[int], stderr: T.Optional[int],
+ postwait_fn: T.Optional[T.Callable[[], None]] = None):
+ self._process = p
+ self.stdout = stdout
+ self.stderr = stderr
+ self.stdo_task = None # type: T.Optional[asyncio.Future[str]]
+ self.stde_task = None # type: T.Optional[asyncio.Future[str]]
+ self.postwait_fn = postwait_fn # type: T.Optional[T.Callable[[], None]]
+ self.all_futures = [] # type: T.List[asyncio.Future]
+
+ def stdout_lines(self, console_mode: ConsoleUser) -> T.AsyncIterator[str]:
+ q = asyncio.Queue() # type: asyncio.Queue[T.Optional[str]]
+ decode_coro = read_decode_lines(self._process.stdout, q, console_mode)
+ self.stdo_task = asyncio.ensure_future(decode_coro)
+ return queue_iter(q)
+
+ def communicate(self, console_mode: ConsoleUser) -> T.Tuple[T.Optional[T.Awaitable[str]],
+ T.Optional[T.Awaitable[str]]]:
+ # asyncio.ensure_future ensures that printing can
+ # run in the background, even before it is awaited
+ if self.stdo_task is None and self.stdout is not None:
+ decode_coro = read_decode(self._process.stdout, console_mode)
+ self.stdo_task = asyncio.ensure_future(decode_coro)
+ self.all_futures.append(self.stdo_task)
+ if self.stderr is not None and self.stderr != asyncio.subprocess.STDOUT:
+ decode_coro = read_decode(self._process.stderr, console_mode)
+ self.stde_task = asyncio.ensure_future(decode_coro)
+ self.all_futures.append(self.stde_task)
+
+ return self.stdo_task, self.stde_task
+
+ async def _kill(self) -> T.Optional[str]:
+ # Python does not provide multiplatform support for
+ # killing a process and all its children so we need
+ # to roll our own.
+ p = self._process
+ try:
+ if is_windows():
+ subprocess.run(['taskkill', '/F', '/T', '/PID', str(p.pid)])
+ else:
+ # Send a termination signal to the process group that setsid()
+ # created - giving it a chance to perform any cleanup.
+ os.killpg(p.pid, signal.SIGTERM)
+
+ # Make sure the termination signal actually kills the process
+ # group, otherwise retry with a SIGKILL.
+ await try_wait_one(p.wait(), timeout=0.5)
+ if p.returncode is not None:
+ return None
+
+ os.killpg(p.pid, signal.SIGKILL)
+
+ await try_wait_one(p.wait(), timeout=1)
+ if p.returncode is not None:
+ return None
+
+ # An earlier kill attempt has not worked for whatever reason.
+ # Try to kill it one last time with a direct call.
+ # If the process has spawned children, they will remain around.
+ p.kill()
+ await try_wait_one(p.wait(), timeout=1)
+ if p.returncode is not None:
+ return None
+ return 'Test process could not be killed.'
+ except ProcessLookupError:
+ # Sometimes (e.g. with Wine) this happens. There's nothing
+ # we can do, probably the process already died so just wait
+ # for the event loop to pick that up.
+ await p.wait()
+ return None
+ finally:
+ if self.stdo_task:
+ self.stdo_task.cancel()
+ if self.stde_task:
+ self.stde_task.cancel()
+
+ async def wait(self, timeout: T.Optional[int]) -> T.Tuple[int, TestResult, T.Optional[str]]:
+ p = self._process
+ result = None
+ additional_error = None
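+ # result stays None on normal completion; the caller then derives the
+ # final TestResult from the return code.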
+
+ self.all_futures.append(asyncio.ensure_future(p.wait()))
+ try:
+ await complete_all(self.all_futures, timeout=timeout)
+ except asyncio.TimeoutError:
+ additional_error = await self._kill()
+ result = TestResult.TIMEOUT
+ except asyncio.CancelledError:
+ # The main loop must have seen Ctrl-C.
+ additional_error = await self._kill()
+ result = TestResult.INTERRUPT
+ finally:
+ if self.postwait_fn:
+ self.postwait_fn()
+
+ return p.returncode or 0, result, additional_error
+
+class SingleTestRunner:
+
+ def __init__(self, test: TestSerialisation, env: T.Dict[str, str], name: str,
+ options: argparse.Namespace):
+ self.test = test
+ self.options = options
+ self.cmd = self._get_cmd()
+
+ if self.cmd and self.test.extra_paths:
+ env['PATH'] = os.pathsep.join(self.test.extra_paths + ['']) + env['PATH']
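+ # If the test runs under wine, expose the extra paths on the Windows
+ # side as well; wine's Z: drive maps the Unix filesystem root.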
+ winecmd = []
+ for c in self.cmd:
+ winecmd.append(c)
+ if os.path.basename(c).startswith('wine'):
+ env['WINEPATH'] = get_wine_shortpath(
+ winecmd,
+ ['Z:' + p for p in self.test.extra_paths] + env.get('WINEPATH', '').split(';')
+ )
+ break
+
+ # If MALLOC_PERTURB_ is not set, or if it is set to an empty value
+ # (i.e., the test or the environment don't explicitly set it), set
+ # it ourselves. We do this unconditionally for regular tests
+ # because it is extremely useful to have.
+ # Setting MALLOC_PERTURB_="0" will completely disable this feature.
+ if ('MALLOC_PERTURB_' not in env or not env['MALLOC_PERTURB_']) and not options.benchmark:
+ env['MALLOC_PERTURB_'] = str(random.randint(1, 255))
+
+ if self.options.gdb or self.test.timeout is None or self.test.timeout <= 0:
+ timeout = None
+ elif self.options.timeout_multiplier is None:
+ timeout = self.test.timeout
+ elif self.options.timeout_multiplier <= 0:
+ timeout = None
+ else:
+ timeout = self.test.timeout * self.options.timeout_multiplier
+
+ is_parallel = test.is_parallel and self.options.num_processes > 1 and not self.options.gdb
+ self.runobj = TestRun(test, env, name, timeout, is_parallel)
+
+ if self.options.gdb:
+ self.console_mode = ConsoleUser.GDB
+ elif self.options.verbose and not is_parallel and not self.runobj.needs_parsing:
+ self.console_mode = ConsoleUser.STDOUT
+ else:
+ self.console_mode = ConsoleUser.LOGGER
+
+ def _get_test_cmd(self) -> T.Optional[T.List[str]]:
+ if self.test.fname[0].endswith('.jar'):
+ return ['java', '-jar'] + self.test.fname
+ elif not self.test.is_cross_built and run_with_mono(self.test.fname[0]):
+ return ['mono'] + self.test.fname
+ elif self.test.cmd_is_built and self.test.is_cross_built and self.test.needs_exe_wrapper:
+ if self.test.exe_runner is None:
+ # Cannot run the test on a cross compiled executable
+ # because there is no exe wrapper.
+ return None
+ elif self.test.cmd_is_built:
+ # If the command is not built (i.e., it's a python script),
+ # then we don't check for the exe-wrapper
+ if not self.test.exe_runner.found():
+ msg = ('The exe_wrapper defined in the cross file {!r} was not '
+ 'found. Please check the command and/or add it to PATH.')
+ raise TestException(msg.format(self.test.exe_runner.name))
+ return self.test.exe_runner.get_command() + self.test.fname
+ return self.test.fname
+
+ def _get_cmd(self) -> T.Optional[T.List[str]]:
+ test_cmd = self._get_test_cmd()
+ if not test_cmd:
+ return None
+ return TestHarness.get_wrapper(self.options) + test_cmd
+
+ @property
+ def is_parallel(self) -> bool:
+ return self.runobj.is_parallel
+
+ @property
+ def visible_name(self) -> str:
+ return self.runobj.name
+
+ @property
+ def timeout(self) -> T.Optional[int]:
+ return self.runobj.timeout
+
+ async def run(self, harness: 'TestHarness') -> TestRun:
+ if self.cmd is None:
+ skip_stdout = 'Not run because cross compiled binaries cannot be executed.'
+ harness.log_start_test(self.runobj)
+ self.runobj.complete_skip(skip_stdout)
+ else:
+ cmd = self.cmd + self.test.cmd_args + self.options.test_args
+ self.runobj.start(cmd)
+ harness.log_start_test(self.runobj)
+ await self._run_cmd(harness, cmd)
+ return self.runobj
+
+ async def _run_subprocess(self, args: T.List[str], *,
+ stdout: int, stderr: int,
+ env: T.Dict[str, str], cwd: T.Optional[str]) -> TestSubprocess:
+ # Let gdb handle ^C instead of us
+ if self.options.gdb:
+ previous_sigint_handler = signal.getsignal(signal.SIGINT)
+ # Make the meson executable ignore SIGINT while gdb is running.
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ def preexec_fn() -> None:
+ if self.options.gdb:
+ # Restore the SIGINT handler for the child process to
+ # ensure it can handle it.
+ signal.signal(signal.SIGINT, signal.SIG_DFL)
+ else:
+ # We don't want setsid() in gdb because gdb needs the
+ # terminal in order to handle ^C; without it gdb would also
+ # print tcsetpgrp() errors and be unable to use the terminal.
+ os.setsid()
+
+ def postwait_fn() -> None:
+ if self.options.gdb:
+ # Let us accept ^C again
+ signal.signal(signal.SIGINT, previous_sigint_handler)
+
+ p = await asyncio.create_subprocess_exec(*args,
+ stdout=stdout,
+ stderr=stderr,
+ env=env,
+ cwd=cwd,
+ preexec_fn=preexec_fn if not is_windows() else None)
+ return TestSubprocess(p, stdout=stdout, stderr=stderr,
+ postwait_fn=postwait_fn if not is_windows() else None)
+
+ async def _run_cmd(self, harness: 'TestHarness', cmd: T.List[str]) -> None:
+ if self.console_mode is ConsoleUser.GDB:
+ stdout = None
+ stderr = None
+ else:
+ stdout = asyncio.subprocess.PIPE
+ stderr = asyncio.subprocess.STDOUT \
+ if not self.options.split and not self.runobj.needs_parsing \
+ else asyncio.subprocess.PIPE
+
+ extra_cmd = [] # type: T.List[str]
+ if self.test.protocol is TestProtocol.GTEST:
+ gtestname = self.test.name
+ if self.test.workdir:
+ gtestname = os.path.join(self.test.workdir, self.test.name)
+ extra_cmd.append(f'--gtest_output=xml:{gtestname}.xml')
+
+ p = await self._run_subprocess(cmd + extra_cmd,
+ stdout=stdout,
+ stderr=stderr,
+ env=self.runobj.env,
+ cwd=self.test.workdir)
+
+ parse_task = None
+ if self.runobj.needs_parsing:
+ parse_coro = self.runobj.parse(harness, p.stdout_lines(self.console_mode))
+ parse_task = asyncio.ensure_future(parse_coro)
+
+ stdo_task, stde_task = p.communicate(self.console_mode)
+ returncode, result, additional_error = await p.wait(self.runobj.timeout)
+
+ if parse_task is not None:
+ res, error = await parse_task
+ if error:
+ additional_error = join_lines(additional_error, error)
+ result = result or res
+
+ stdo = await stdo_task if stdo_task else ''
+ stde = await stde_task if stde_task else ''
+ stde = join_lines(stde, additional_error)
+ self.runobj.complete(returncode, result, stdo, stde)
+
+
+class TestHarness:
+ def __init__(self, options: argparse.Namespace):
+ self.options = options
+ self.collected_failures = [] # type: T.List[TestRun]
+ self.fail_count = 0
+ self.expectedfail_count = 0
+ self.unexpectedpass_count = 0
+ self.success_count = 0
+ self.skip_count = 0
+ self.timeout_count = 0
+ self.test_count = 0
+ self.name_max_len = 0
+ self.is_run = False
+ self.loggers = [] # type: T.List[TestLogger]
+ self.loggers.append(ConsoleLogger())
+ self.need_console = False
+
+ self.logfile_base = None # type: T.Optional[str]
+ if self.options.logbase and not self.options.gdb:
+ namebase = None
+ self.logfile_base = os.path.join(self.options.wd, 'meson-logs', self.options.logbase)
+
+ if self.options.wrapper:
+ namebase = os.path.basename(self.get_wrapper(self.options)[0])
+ elif self.options.setup:
+ namebase = self.options.setup.replace(":", "_")
+
+ if namebase:
+ self.logfile_base += '-' + namebase.replace(' ', '_')
+
+ startdir = os.getcwd()
+ try:
+ os.chdir(self.options.wd)
+ self.build_data = build.load(os.getcwd())
+ if not self.options.setup:
+ self.options.setup = self.build_data.test_setup_default_name
+ if self.options.benchmark:
+ self.tests = self.load_tests('meson_benchmark_setup.dat')
+ else:
+ self.tests = self.load_tests('meson_test_setup.dat')
+ finally:
+ os.chdir(startdir)
+
+ ss = set()
+ for t in self.tests:
+ for s in t.suite:
+ ss.add(s)
+ self.suites = list(ss)
+
+ def load_tests(self, file_name: str) -> T.List[TestSerialisation]:
+ datafile = Path('meson-private') / file_name
+ if not datafile.is_file():
+ raise TestException(f'Directory {self.options.wd!r} does not seem to be a Meson build directory.')
+ with datafile.open('rb') as f:
+ objs = check_testdata(pickle.load(f))
+ return objs
+
+ def __enter__(self) -> 'TestHarness':
+ return self
+
+ def __exit__(self, exc_type: T.Any, exc_value: T.Any, traceback: T.Any) -> None:
+ self.close_logfiles()
+
+ def close_logfiles(self) -> None:
+ for l in self.loggers:
+ l.close()
+
+ def get_test_setup(self, test: T.Optional[TestSerialisation]) -> build.TestSetup:
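+ # A setup may be given as "project:setup"; a bare setup name is
+ # looked up within the test's own project.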
+ if ':' in self.options.setup:
+ if self.options.setup not in self.build_data.test_setups:
+ sys.exit(f"Unknown test setup '{self.options.setup}'.")
+ return self.build_data.test_setups[self.options.setup]
+ else:
+ full_name = test.project_name + ":" + self.options.setup
+ if full_name not in self.build_data.test_setups:
+ sys.exit(f"Test setup '{self.options.setup}' not found from project '{test.project_name}'.")
+ return self.build_data.test_setups[full_name]
+
+ def merge_setup_options(self, options: argparse.Namespace, test: TestSerialisation) -> T.Dict[str, str]:
+ current = self.get_test_setup(test)
+ if not options.gdb:
+ options.gdb = current.gdb
+ if options.gdb:
+ options.verbose = True
+ if options.timeout_multiplier is None:
+ options.timeout_multiplier = current.timeout_multiplier
+ # if options.env is None:
+ # options.env = current.env # FIXME, should probably merge options here.
+ if options.wrapper is None:
+ options.wrapper = current.exe_wrapper
+ elif current.exe_wrapper:
+ sys.exit('Conflict: both test setup and command line specify an exe wrapper.')
+ return current.env.get_env(os.environ.copy())
+
+ def get_test_runner(self, test: TestSerialisation) -> SingleTestRunner:
+ name = self.get_pretty_suite(test)
+ options = deepcopy(self.options)
+ if self.options.setup:
+ env = self.merge_setup_options(options, test)
+ else:
+ env = os.environ.copy()
+ test_env = test.env.get_env(env)
+ env.update(test_env)
+ if (test.is_cross_built and test.needs_exe_wrapper and
+ test.exe_runner and test.exe_runner.found()):
+ env['MESON_EXE_WRAPPER'] = join_args(test.exe_runner.get_command())
+ return SingleTestRunner(test, env, name, options)
+
+ def process_test_result(self, result: TestRun) -> None:
+ if result.res is TestResult.TIMEOUT:
+ self.timeout_count += 1
+ elif result.res is TestResult.SKIP:
+ self.skip_count += 1
+ elif result.res is TestResult.OK:
+ self.success_count += 1
+ elif result.res in {TestResult.FAIL, TestResult.ERROR, TestResult.INTERRUPT}:
+ self.fail_count += 1
+ elif result.res is TestResult.EXPECTEDFAIL:
+ self.expectedfail_count += 1
+ elif result.res is TestResult.UNEXPECTEDPASS:
+ self.unexpectedpass_count += 1
+ else:
+ sys.exit(f'Unknown test result encountered: {result.res}')
+
+ if result.res.is_bad():
+ self.collected_failures.append(result)
+ for l in self.loggers:
+ l.log(self, result)
+
+ @property
+ def numlen(self) -> int:
+ return len(str(self.test_count))
+
+ @property
+ def max_left_width(self) -> int:
+ return 2 * self.numlen + 2
+
+ def format(self, result: TestRun, colorize: bool,
+ max_left_width: int = 0,
+ prefix: str = '',
+ left: T.Optional[str] = None,
+ middle: T.Optional[str] = None,
+ right: T.Optional[str] = None) -> str:
+
+ if left is None:
+ left = '{num:{numlen}}/{testcount} '.format(
+ numlen=self.numlen,
+ num=result.num,
+ testcount=self.test_count)
+
+ # A non-default max_left_width lets the logger print more stuff before the
+ # name, while ensuring that the rightmost columns remain aligned.
+ max_left_width = max(max_left_width, self.max_left_width)
+
+ if middle is None:
+ middle = result.name
+ extra_mid_width = max_left_width + self.name_max_len + 1 - uniwidth(middle) - uniwidth(left) - uniwidth(prefix)
+ middle += ' ' * max(1, extra_mid_width)
+
+ if right is None:
+ right = '{res} {dur:{durlen}.2f}s'.format(
+ res=result.res.get_text(colorize),
+ dur=result.duration,
+ durlen=self.duration_max_len + 3)
+ detail = result.detail
+ if detail:
+ right += ' ' + detail
+ return prefix + left + middle + right
+
+ def summary(self) -> str:
+ return textwrap.dedent('''
+
+ Ok: {:<4}
+ Expected Fail: {:<4}
+ Fail: {:<4}
+ Unexpected Pass: {:<4}
+ Skipped: {:<4}
+ Timeout: {:<4}
+ ''').format(self.success_count, self.expectedfail_count, self.fail_count,
+ self.unexpectedpass_count, self.skip_count, self.timeout_count)
+
+ def total_failure_count(self) -> int:
+ return self.fail_count + self.unexpectedpass_count + self.timeout_count
+
+ def doit(self) -> int:
+ if self.is_run:
+ raise RuntimeError('Test harness object can only be used once.')
+ self.is_run = True
+ tests = self.get_tests()
+ if not tests:
+ return 0
+ if not self.options.no_rebuild and not rebuild_deps(self.options.wd, tests):
+ # We return 125 here in case the build failed.
+ # The reason is that exit code 125 tells `git bisect run` that the current
+ # commit should be skipped. Thus users can directly use `meson test` to
+ # bisect without needing to handle the does-not-build case separately in a
+ # wrapper script.
+ sys.exit(125)
+
+ self.name_max_len = max([uniwidth(self.get_pretty_suite(test)) for test in tests])
+ startdir = os.getcwd()
+ try:
+ os.chdir(self.options.wd)
+ runners = [] # type: T.List[SingleTestRunner]
+ for i in range(self.options.repeat):
+ runners.extend(self.get_test_runner(test) for test in tests)
+ if i == 0:
+ self.duration_max_len = max([len(str(int(runner.timeout or 99)))
+ for runner in runners])
+ # Disable the progress report if it gets in the way
+ self.need_console = any(runner.console_mode is not ConsoleUser.LOGGER
+ for runner in runners)
+
+ self.test_count = len(runners)
+ self.run_tests(runners)
+ finally:
+ os.chdir(startdir)
+ return self.total_failure_count()
+
+ @staticmethod
+ def split_suite_string(suite: str) -> T.Tuple[str, str]:
+ if ':' in suite:
+ split = suite.split(':', 1)
+ assert len(split) == 2
+ return split[0], split[1]
+ else:
+ return suite, ""
+
+ @staticmethod
+ def test_in_suites(test: TestSerialisation, suites: T.List[str]) -> bool:
+ for suite in suites:
+ (prj_match, st_match) = TestHarness.split_suite_string(suite)
+ for prjst in test.suite:
+ (prj, st) = TestHarness.split_suite_string(prjst)
+
+ # the SUITE can be passed as
+ # suite_name
+ # or
+ # project_name:suite_name
+ # so we need to select only the test belonging to project_name
+
+ # this "if" handles the first case (i.e., SUITE == suite_name)
+
+ # in this way we can run tests belonging to different
+ # (sub)projects which share the same suite_name
+ if not st_match and st == prj_match:
+ return True
+
+ # these two conditions are needed to handle the second case
+ # i.e., SUITE == project_name:suite_name
+
+ # in this way we select only the tests of
+ # project_name with suite_name
+ if prj_match and prj != prj_match:
+ continue
+ if st_match and st != st_match:
+ continue
+ return True
+ return False
+
+ def test_suitable(self, test: TestSerialisation) -> bool:
+ if TestHarness.test_in_suites(test, self.options.exclude_suites):
+ return False
+
+ if self.options.include_suites:
+ # Both force inclusion (overriding add_test_setup) and exclude
+ # everything else
+ return TestHarness.test_in_suites(test, self.options.include_suites)
+
+ if self.options.setup:
+ setup = self.get_test_setup(test)
+ if TestHarness.test_in_suites(test, setup.exclude_suites):
+ return False
+
+ return True
+
+ def tests_from_args(self, tests: T.List[TestSerialisation]) -> T.Generator[TestSerialisation, None, None]:
+ '''
+ Allow specifying test names like "meson test foo1 foo2", where test('foo1', ...) is defined in a meson.build file.
+
+ Also support specifying the subproject to run tests from like
+ "meson test subproj:" (all tests inside subproj) or "meson test subproj:foo1"
+ to run foo1 inside subproj. Similarly, "meson test :foo1" runs all
+ tests with that name across all subprojects, which is identical to
+ "meson test foo1".
+ '''
+ for arg in self.options.args:
+ if ':' in arg:
+ subproj, name = arg.split(':', maxsplit=1)
+ else:
+ subproj, name = '', arg
+ for t in tests:
+ if subproj and t.project_name != subproj:
+ continue
+ if name and t.name != name:
+ continue
+ yield t
+
+ def get_tests(self) -> T.List[TestSerialisation]:
+ if not self.tests:
+ print('No tests defined.')
+ return []
+
+ tests = [t for t in self.tests if self.test_suitable(t)]
+ if self.options.args:
+ tests = list(self.tests_from_args(tests))
+
+ if not tests:
+ print('No suitable tests defined.')
+ return []
+
+ return tests
+
+ def flush_logfiles(self) -> None:
+ for l in self.loggers:
+ l.flush()
+
+ def open_logfiles(self) -> None:
+ if not self.logfile_base:
+ return
+
+ self.loggers.append(JunitBuilder(self.logfile_base + '.junit.xml'))
+ self.loggers.append(JsonLogfileBuilder(self.logfile_base + '.json'))
+ self.loggers.append(TextLogfileBuilder(self.logfile_base + '.txt', errors='surrogateescape'))
+
+ @staticmethod
+ def get_wrapper(options: argparse.Namespace) -> T.List[str]:
+ wrap = [] # type: T.List[str]
+ if options.gdb:
+ wrap = [options.gdb_path, '--quiet', '--nh']
+ if options.repeat > 1:
+ wrap += ['-ex', 'run', '-ex', 'quit']
+ # Signal the end of arguments to gdb
+ wrap += ['--args']
+ if options.wrapper:
+ wrap += options.wrapper
+ return wrap
+
+ def get_pretty_suite(self, test: TestSerialisation) -> str:
+ if len(self.suites) > 1 and test.suite:
+ rv = TestHarness.split_suite_string(test.suite[0])[0]
+ s = "+".join(TestHarness.split_suite_string(s)[1] for s in test.suite)
+ if s:
+ rv += ":"
+ return rv + s + " / " + test.name
+ else:
+ return test.name
+
+ def run_tests(self, runners: T.List[SingleTestRunner]) -> None:
+ try:
+ self.open_logfiles()
+ # Replace with asyncio.run once we can require Python 3.7
+ loop = asyncio.get_event_loop()
+ loop.run_until_complete(self._run_tests(runners))
+ finally:
+ self.close_logfiles()
+
+ def log_subtest(self, test: TestRun, s: str, res: TestResult) -> None:
+ for l in self.loggers:
+ l.log_subtest(self, test, s, res)
+
+ def log_start_test(self, test: TestRun) -> None:
+ for l in self.loggers:
+ l.start_test(self, test)
+
+ async def _run_tests(self, runners: T.List[SingleTestRunner]) -> None:
+ semaphore = asyncio.Semaphore(self.options.num_processes)
+ futures = deque() # type: T.Deque[asyncio.Future]
+ running_tests = dict() # type: T.Dict[asyncio.Future, str]
+ interrupted = False
+ ctrlc_times = deque(maxlen=MAX_CTRLC) # type: T.Deque[float]
+
+ async def run_test(test: SingleTestRunner) -> None:
+ async with semaphore:
+ if interrupted or (self.options.repeat > 1 and self.fail_count):
+ return
+ res = await test.run(self)
+ self.process_test_result(res)
+
+ def test_done(f: asyncio.Future) -> None:
+ if not f.cancelled():
+ f.result()
+ futures.remove(f)
+ try:
+ del running_tests[f]
+ except KeyError:
+ pass
+
+ def cancel_one_test(warn: bool) -> None:
+ future = futures.popleft()
+ futures.append(future)
+ if warn:
+ self.flush_logfiles()
+ mlog.warning('CTRL-C detected, interrupting {}'.format(running_tests[future]))
+ del running_tests[future]
+ future.cancel()
+
+ def cancel_all_tests() -> None:
+ nonlocal interrupted
+ interrupted = True
+ while running_tests:
+ cancel_one_test(False)
+
+ def sigterm_handler() -> None:
+ if interrupted:
+ return
+ self.flush_logfiles()
+ mlog.warning('Received SIGTERM, exiting')
+ cancel_all_tests()
+
+ def sigint_handler() -> None:
+ # We always pick the longest-running future that has not been cancelled
+ # If all the tests have been CTRL-C'ed, just stop
+ nonlocal interrupted
+ if interrupted:
+ return
+ ctrlc_times.append(asyncio.get_event_loop().time())
+ if len(ctrlc_times) == MAX_CTRLC and ctrlc_times[-1] - ctrlc_times[0] < 1:
+ self.flush_logfiles()
+ mlog.warning('CTRL-C detected, exiting')
+ cancel_all_tests()
+ elif running_tests:
+ cancel_one_test(True)
+ else:
+ self.flush_logfiles()
+ mlog.warning('CTRL-C detected, exiting')
+ interrupted = True
+
+ for l in self.loggers:
+ l.start(self)
+
+ if sys.platform != 'win32':
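+ # When this process is not the group leader, ^C is delivered to the
+ # whole process group anyway, so interactive single-test cancellation
+ # makes little sense; treat SIGINT like SIGTERM in that case.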
+ if os.getpgid(0) == os.getpid():
+ asyncio.get_event_loop().add_signal_handler(signal.SIGINT, sigint_handler)
+ else:
+ asyncio.get_event_loop().add_signal_handler(signal.SIGINT, sigterm_handler)
+ asyncio.get_event_loop().add_signal_handler(signal.SIGTERM, sigterm_handler)
+ try:
+ for runner in runners:
+ if not runner.is_parallel:
+ await complete_all(futures)
+ future = asyncio.ensure_future(run_test(runner))
+ futures.append(future)
+ running_tests[future] = runner.visible_name
+ future.add_done_callback(test_done)
+ if not runner.is_parallel:
+ await complete(future)
+ if self.options.repeat > 1 and self.fail_count:
+ break
+
+ await complete_all(futures)
+ finally:
+ if sys.platform != 'win32':
+ asyncio.get_event_loop().remove_signal_handler(signal.SIGINT)
+ asyncio.get_event_loop().remove_signal_handler(signal.SIGTERM)
+ for l in self.loggers:
+ await l.finish(self)
+
+def list_tests(th: TestHarness) -> bool:
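+ # The boolean return value becomes the exit status: False (0) when
+ # tests were found and listed, True (1) when there were none.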
+ tests = th.get_tests()
+ for t in tests:
+ print(th.get_pretty_suite(t))
+ return not tests
+
+def rebuild_deps(wd: str, tests: T.List[TestSerialisation]) -> bool:
+ def convert_path_to_target(path: str) -> str:
+ path = os.path.relpath(path, wd)
+ if os.sep != '/':
+ path = path.replace(os.sep, '/')
+ return path
+
+ if not (Path(wd) / 'build.ninja').is_file():
+ print('Only ninja backend is supported to rebuild tests before running them.')
+ return True
+
+ ninja = environment.detect_ninja()
+ if not ninja:
+ print("Can't find ninja, can't rebuild test.")
+ return False
+
+ depends = set() # type: T.Set[str]
+ targets = set() # type: T.Set[str]
+ intro_targets = dict() # type: T.Dict[str, T.List[str]]
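+ # Map each introspected target id to the output paths it builds, so
+ # that test dependencies can be passed to ninja as targets.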
+ for target in load_info_file(get_infodir(wd), kind='targets'):
+ intro_targets[target['id']] = [
+ convert_path_to_target(f)
+ for f in target['filename']]
+ for t in tests:
+ for d in t.depends:
+ if d in depends:
+ continue
+ depends.add(d)
+ targets.update(intro_targets[d])
+
+ ret = subprocess.run(ninja + ['-C', wd] + sorted(targets)).returncode
+ if ret != 0:
+ print(f'Could not rebuild {wd}')
+ return False
+
+ return True
+
+def run(options: argparse.Namespace) -> int:
+ if options.benchmark:
+ options.num_processes = 1
+
+ if options.verbose and options.quiet:
+ print('Can not be both quiet and verbose at the same time.')
+ return 1
+
+ check_bin = None
+ if options.gdb:
+ options.verbose = True
+ if options.wrapper:
+ print('Must not specify both a wrapper and gdb at the same time.')
+ return 1
+ check_bin = 'gdb'
+
+ if options.wrapper:
+ check_bin = options.wrapper[0]
+
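+ # The default selector event loop cannot spawn subprocesses on
+ # Windows; the proactor event loop can.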
+ if sys.platform == 'win32':
+ loop = asyncio.ProactorEventLoop()
+ asyncio.set_event_loop(loop)
+
+ if check_bin is not None:
+ exe = ExternalProgram(check_bin, silent=True)
+ if not exe.found():
+ print(f'Could not find requested program: {check_bin!r}')
+ return 1
+
+ with TestHarness(options) as th:
+ try:
+ if options.list:
+ return list_tests(th)
+ return th.doit()
+ except TestException as e:
+ print('Meson test encountered an error:\n')
+ if os.environ.get('MESON_FORCE_BACKTRACE'):
+ raise e
+ else:
+ print(e)
+ return 1
+
+def run_with_args(args: T.List[str]) -> int:
+ parser = argparse.ArgumentParser(prog='meson test')
+ add_arguments(parser)
+ options = parser.parse_args(args)
+ return run(options)
diff --git a/meson/mesonbuild/munstable_coredata.py b/meson/mesonbuild/munstable_coredata.py
new file mode 100644
index 000000000..0ca8f3398
--- /dev/null
+++ b/meson/mesonbuild/munstable_coredata.py
@@ -0,0 +1,114 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from . import coredata as cdata
+from .mesonlib import MachineChoice, OptionKey
+
+import os.path
+import pprint
+import textwrap
+
+def add_arguments(parser):
+ parser.add_argument('--all', action='store_true', dest='all', default=False,
+ help='Show data not used by current backend.')
+
+ parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
+
+
+def dump_compilers(compilers):
+ for lang, compiler in compilers.items():
+ print(' ' + lang + ':')
+ print(' Id: ' + compiler.id)
+ print(' Command: ' + ' '.join(compiler.exelist))
+ if compiler.full_version:
+ print(' Full version: ' + compiler.full_version)
+ if compiler.version:
+ print(' Detected version: ' + compiler.version)
+
+
+def dump_guids(d):
+ for name, value in d.items():
+ print(' ' + name + ': ' + value)
+
+
+def run(options):
+ datadir = 'meson-private'
+ if options.builddir is not None:
+ datadir = os.path.join(options.builddir, datadir)
+ if not os.path.isdir(datadir):
+ print('Current directory is not a build dir. Please specify it or '
+ 'change the working directory to it.')
+ return 1
+
+ all_backends = options.all
+
+ print('This is a dump of the internal unstable cache of meson. This is for debugging only.')
+ print('Do NOT parse, this will change from version to version in incompatible ways')
+ print('')
+
+ coredata = cdata.load(options.builddir)
+ backend = coredata.get_option(OptionKey('backend'))
+ for k, v in sorted(coredata.__dict__.items()):
+ if k in ('backend_options', 'base_options', 'builtins', 'compiler_options', 'user_options'):
+ # use `meson configure` to view these
+ pass
+ elif k in ['install_guid', 'test_guid', 'regen_guid']:
+ if all_backends or backend.startswith('vs'):
+ print(k + ': ' + v)
+ elif k == 'target_guids':
+ if all_backends or backend.startswith('vs'):
+ print(k + ':')
+ dump_guids(v)
+ elif k in ['lang_guids']:
+ if all_backends or backend.startswith('vs') or backend == 'xcode':
+ print(k + ':')
+ dump_guids(v)
+ elif k == 'meson_command':
+ if all_backends or backend.startswith('vs'):
+ print('Meson command used in build file regeneration: ' + ' '.join(v))
+ elif k == 'pkgconf_envvar':
+ print('Last seen PKGCONFIG environment variable value: ' + v)
+ elif k == 'version':
+ print('Meson version: ' + v)
+ elif k == 'cross_files':
+ if v:
+ print('Cross File: ' + ' '.join(v))
+ elif k == 'config_files':
+ if v:
+ print('Native File: ' + ' '.join(v))
+ elif k == 'compilers':
+ for for_machine in MachineChoice:
+ print('Cached {} machine compilers:'.format(
+ for_machine.get_lower_case_name()))
+ dump_compilers(v[for_machine])
+ elif k == 'deps':
+ def print_dep(dep_key, dep):
+ print(' ' + dep_key[0] + ": ")
+ print(' compile args: ' + repr(dep.get_compile_args()))
+ print(' link args: ' + repr(dep.get_link_args()))
+ if dep.get_sources():
+ print(' sources: ' + repr(dep.get_sources()))
+ print(' version: ' + repr(dep.get_version()))
+
+ for for_machine in iter(MachineChoice):
+ items_list = list(sorted(v[for_machine].items()))
+ if items_list:
+ print('Cached dependencies for {} machine'.format(for_machine.get_lower_case_name()))
+ for dep_key, deps in items_list:
+ for dep in deps:
+ print_dep(dep_key, dep)
+ else:
+ print(k + ':')
+ print(textwrap.indent(pprint.pformat(v), ' '))
diff --git a/meson/mesonbuild/optinterpreter.py b/meson/mesonbuild/optinterpreter.py
new file mode 100644
index 000000000..c91363df0
--- /dev/null
+++ b/meson/mesonbuild/optinterpreter.py
@@ -0,0 +1,234 @@
+# Copyright 2013-2014 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+import functools
+import typing as T
+
+from . import coredata
+from . import mesonlib
+from . import mparser
+from . import mlog
+from .interpreterbase import FeatureNew
+
+if T.TYPE_CHECKING:
+ from .interpreterbase import TV_func
+
+class OptionException(mesonlib.MesonException):
+ pass
+
+
+def permitted_kwargs(permitted: T.Set[str]) -> T.Callable[..., T.Any]:
+ """Function that validates kwargs for options."""
+ def _wraps(func: 'TV_func') -> 'TV_func':
+ @functools.wraps(func)
+ def _inner(name: str, description: str, kwargs: T.Dict[str, T.Any]) -> T.Any:
+ bad = [a for a in kwargs.keys() if a not in permitted]
+ if bad:
+ raise OptionException('Invalid kwargs for option "{}": "{}"'.format(
+ name, ' '.join(bad)))
+ return func(description, kwargs)
+ return T.cast('TV_func', _inner)
+ return _wraps
+
+
+optname_regex = re.compile('[^a-zA-Z0-9_-]')
+
+@permitted_kwargs({'value', 'yield'})
+def string_parser(description: str, kwargs: T.Dict[str, T.Any]) -> coredata.UserStringOption:
+ return coredata.UserStringOption(description,
+ kwargs.get('value', ''),
+ kwargs.get('yield', coredata.default_yielding))
+
+@permitted_kwargs({'value', 'yield'})
+def boolean_parser(description: str, kwargs: T.Dict[str, T.Any]) -> coredata.UserBooleanOption:
+ return coredata.UserBooleanOption(description,
+ kwargs.get('value', True),
+ kwargs.get('yield', coredata.default_yielding))
+
+@permitted_kwargs({'value', 'yield', 'choices'})
+def combo_parser(description: str, kwargs: T.Dict[str, T.Any]) -> coredata.UserComboOption:
+ if 'choices' not in kwargs:
+ raise OptionException('Combo option missing "choices" keyword.')
+ choices = kwargs['choices']
+ if not isinstance(choices, list):
+ raise OptionException('Combo choices must be an array.')
+ for i in choices:
+ if not isinstance(i, str):
+ raise OptionException('Combo choice elements must be strings.')
+ return coredata.UserComboOption(description,
+ choices,
+ kwargs.get('value', choices[0]),
+ kwargs.get('yield', coredata.default_yielding),)
+
+
+@permitted_kwargs({'value', 'min', 'max', 'yield'})
+def integer_parser(description: str, kwargs: T.Dict[str, T.Any]) -> coredata.UserIntegerOption:
+ if 'value' not in kwargs:
+ raise OptionException('Integer option must contain value argument.')
+ inttuple = (kwargs.get('min', None), kwargs.get('max', None), kwargs['value'])
+ return coredata.UserIntegerOption(description,
+ inttuple,
+ kwargs.get('yield', coredata.default_yielding))
+
+# FIXME: Cannot use FeatureNew while parsing options because we parse it before
+# reading options in project(). See func_project() in interpreter.py
+#@FeatureNew('array type option()', '0.44.0')
+@permitted_kwargs({'value', 'yield', 'choices'})
+def string_array_parser(description: str, kwargs: T.Dict[str, T.Any]) -> coredata.UserArrayOption:
+ if 'choices' in kwargs:
+ choices = kwargs['choices']
+ if not isinstance(choices, list):
+ raise OptionException('Array choices must be an array.')
+ for i in choices:
+ if not isinstance(i, str):
+ raise OptionException('Array choice elements must be strings.')
+ value = kwargs.get('value', choices)
+ else:
+ choices = None
+ value = kwargs.get('value', [])
+ if not isinstance(value, list):
+ raise OptionException('Array choices must be passed as an array.')
+ return coredata.UserArrayOption(description,
+ value,
+ choices=choices,
+ yielding=kwargs.get('yield', coredata.default_yielding))
+
+@permitted_kwargs({'value', 'yield'})
+def feature_parser(description: str, kwargs: T.Dict[str, T.Any]) -> coredata.UserFeatureOption:
+ return coredata.UserFeatureOption(description,
+ kwargs.get('value', 'auto'),
+ yielding=kwargs.get('yield', coredata.default_yielding))
+
+option_types = {'string': string_parser,
+ 'boolean': boolean_parser,
+ 'combo': combo_parser,
+ 'integer': integer_parser,
+ 'array': string_array_parser,
+ 'feature': feature_parser,
+ } # type: T.Dict[str, T.Callable[[str, str, T.Dict[str, T.Any]], coredata.UserOption]]
+
+class OptionInterpreter:
+ def __init__(self, subproject: str) -> None:
+ self.options: 'coredata.KeyedOptionDictType' = {}
+ self.subproject = subproject
+
+ def process(self, option_file: str) -> None:
+ try:
+ with open(option_file, encoding='utf-8') as f:
+ ast = mparser.Parser(f.read(), option_file).parse()
+ except mesonlib.MesonException as me:
+ me.file = option_file
+ raise me
+ if not isinstance(ast, mparser.CodeBlockNode):
+ e = OptionException('Option file is malformed.')
+ e.lineno = ast.lineno()
+ e.file = option_file
+ raise e
+ for cur in ast.lines:
+ try:
+ self.evaluate_statement(cur)
+ except mesonlib.MesonException as e:
+ e.lineno = cur.lineno
+ e.colno = cur.colno
+ e.file = option_file
+ raise e
+ except Exception as e:
+ raise mesonlib.MesonException(
+ str(e), lineno=cur.lineno, colno=cur.colno, file=option_file)
+
+ def reduce_single(self, arg: T.Union[str, mparser.BaseNode]) -> T.Union[str, int, bool, T.Sequence[T.Union[str, int, bool]]]:
+ if isinstance(arg, str):
+ return arg
+ elif isinstance(arg, (mparser.StringNode, mparser.BooleanNode,
+ mparser.NumberNode)):
+ return arg.value
+ elif isinstance(arg, mparser.ArrayNode):
+ lr = [self.reduce_single(curarg) for curarg in arg.args.arguments]
+ # mypy really struggles with recursive flattening, help it out
+ return T.cast(T.Sequence[T.Union[str, int, bool]], lr)
+ elif isinstance(arg, mparser.UMinusNode):
+ res = self.reduce_single(arg.value)
+ if not isinstance(res, (int, float)):
+ raise OptionException('Token after "-" is not a number')
+ FeatureNew.single_use('negative numbers in meson_options.txt', '0.54.1', self.subproject)
+ return -res
+ elif isinstance(arg, mparser.NotNode):
+ res = self.reduce_single(arg.value)
+ if not isinstance(res, bool):
+ raise OptionException('Token after "not" is not a boolean')
+ FeatureNew.single_use('negation ("not") in meson_options.txt', '0.54.1', self.subproject)
+ return not res
+ elif isinstance(arg, mparser.ArithmeticNode):
+ l = self.reduce_single(arg.left)
+ r = self.reduce_single(arg.right)
+ if not (arg.operation == 'add' and isinstance(l, str) and isinstance(r, str)):
+ raise OptionException('Only string concatenation with the "+" operator is allowed')
+ FeatureNew.single_use('string concatenation in meson_options.txt', '0.55.0', self.subproject)
+ return l + r
+ else:
+ raise OptionException('Arguments may only be string, int, bool, or array of those.')
+
+ def reduce_arguments(self, args: mparser.ArgumentNode) -> T.Tuple[
+ T.List[T.Union[str, int, bool, T.Sequence[T.Union[str, int, bool]]]],
+ T.Dict[str, T.Union[str, int, bool, T.Sequence[T.Union[str, int, bool]]]]]:
+ if args.incorrect_order():
+ raise OptionException('All keyword arguments must be after positional arguments.')
+ reduced_pos = [self.reduce_single(arg) for arg in args.arguments]
+ reduced_kw = {}
+ for key in args.kwargs.keys():
+ if not isinstance(key, mparser.IdNode):
+ raise OptionException('Keyword argument name is not a string.')
+ a = args.kwargs[key]
+ reduced_kw[key.value] = self.reduce_single(a)
+ return reduced_pos, reduced_kw
+
+ def evaluate_statement(self, node: mparser.BaseNode) -> None:
+ if not isinstance(node, mparser.FunctionNode):
+ raise OptionException('Option file may only contain option definitions')
+ func_name = node.func_name
+ if func_name != 'option':
+ raise OptionException('Only calls to option() are allowed in option files.')
+ (posargs, kwargs) = self.reduce_arguments(node.args)
+
+ if len(posargs) != 1:
+ raise OptionException('Option() must have one (and only one) positional argument')
+ opt_name = posargs[0]
+ if not isinstance(opt_name, str):
+ raise OptionException('Positional argument must be a string.')
+ if optname_regex.search(opt_name) is not None:
+ raise OptionException('Option names can only contain letters, numbers, dashes or underscores.')
+ key = mesonlib.OptionKey.from_string(opt_name).evolve(subproject=self.subproject)
+ if not key.is_project():
+ raise OptionException('Option name %s is reserved.' % opt_name)
+
+ if 'yield' in kwargs:
+ FeatureNew.single_use('option yield', '0.45.0', self.subproject)
+
+ if 'type' not in kwargs:
+ raise OptionException('Option call missing mandatory "type" keyword argument')
+ opt_type = kwargs.pop('type')
+ if not isinstance(opt_type, str):
+ raise OptionException('option() type must be a string')
+ if opt_type not in option_types:
+ raise OptionException('Unknown type %s.' % opt_type)
+
+ description = kwargs.pop('description', '')
+ if not isinstance(description, str):
+ raise OptionException('Option descriptions must be strings.')
+
+ opt = option_types[opt_type](opt_name, description, kwargs)
+ if opt.description == '':
+ opt.description = opt_name
+ self.options[key] = opt
diff --git a/meson/mesonbuild/programs.py b/meson/mesonbuild/programs.py
new file mode 100644
index 000000000..af27801f7
--- /dev/null
+++ b/meson/mesonbuild/programs.py
@@ -0,0 +1,386 @@
+# Copyright 2013-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Representations and logic for External and Internal Programs."""
+
+import functools
+import os
+import shutil
+import stat
+import sys
+import re
+import typing as T
+from pathlib import Path
+
+from . import mesonlib
+from . import mlog
+from .mesonlib import MachineChoice
+
+if T.TYPE_CHECKING:
+ from .environment import Environment
+ from .interpreter import Interpreter
+
+
+class ExternalProgram(mesonlib.HoldableObject):
+
+ """A program that is found on the system."""
+
+ windows_exts = ('exe', 'msc', 'com', 'bat', 'cmd')
+ for_machine = MachineChoice.BUILD
+
+ def __init__(self, name: str, command: T.Optional[T.List[str]] = None,
+ silent: bool = False, search_dir: T.Optional[str] = None,
+ extra_search_dirs: T.Optional[T.List[str]] = None):
+ self.name = name
+ self.path: T.Optional[str] = None
+ self.cached_version: T.Optional[str] = None
+ if command is not None:
+ self.command = mesonlib.listify(command)
+ if mesonlib.is_windows():
+ cmd = self.command[0]
+ args = self.command[1:]
+ # Check whether the specified cmd is a path to a script, in
+ # which case we need to insert the interpreter. If not, try to
+ # use it as-is.
+ ret = self._shebang_to_cmd(cmd)
+ if ret:
+ self.command = ret + args
+ else:
+ self.command = [cmd] + args
+ else:
+ all_search_dirs = [search_dir]
+ if extra_search_dirs:
+ all_search_dirs += extra_search_dirs
+ for d in all_search_dirs:
+ self.command = self._search(name, d)
+ if self.found():
+ break
+
+ if self.found():
+ # Set path to be the last item that is actually a file (in order to
+ # skip options in something like ['python', '-u', 'file.py']. If we
+ # can't find any components, default to the last component of the path.
+ for arg in reversed(self.command):
+ if arg is not None and os.path.isfile(arg):
+ self.path = arg
+ break
+ else:
+ self.path = self.command[-1]
+
+ if not silent:
+ # ignore the warning because derived classes never call this __init__
+ # method, and thus only the found() method of this class is ever executed
+ if self.found(): # lgtm [py/init-calls-subclass]
+ mlog.log('Program', mlog.bold(name), 'found:', mlog.green('YES'),
+ '(%s)' % ' '.join(self.command))
+ else:
+ mlog.log('Program', mlog.bold(name), 'found:', mlog.red('NO'))
+
+ def summary_value(self) -> T.Union[str, mlog.AnsiDecorator]:
+ if not self.found():
+ return mlog.red('NO')
+ return self.path
+
+ def __repr__(self) -> str:
+ r = '<{} {!r} -> {!r}>'
+ return r.format(self.__class__.__name__, self.name, self.command)
+
+ def description(self) -> str:
+ '''Human friendly description of the command'''
+ return ' '.join(self.command)
+
+ def get_version(self, interpreter: 'Interpreter') -> str:
+ if not self.cached_version:
+ raw_cmd = self.get_command() + ['--version']
+ cmd: T.List[T.Union[str, ExternalProgram]] = [self, '--version']
+ res = interpreter.run_command_impl(interpreter.current_node, cmd, {}, True)
+ if res.returncode != 0:
+ m = 'Running {!r} failed'
+ raise mesonlib.MesonException(m.format(raw_cmd))
+ output = res.stdout.strip()
+ if not output:
+ output = res.stderr.strip()
+ match = re.search(r'([0-9][0-9\.]+)', output)
+ if not match:
+ m = 'Could not find a version number in output of {!r}'
+ raise mesonlib.MesonException(m.format(raw_cmd))
+ self.cached_version = match.group(1)
+ return self.cached_version
+
+ @classmethod
+ def from_bin_list(cls, env: 'Environment', for_machine: MachineChoice, name: str) -> 'ExternalProgram':
+ # There is a static `for_machine` for this class because the binary
+ # always runs on the build platform. (Its host platform is our build
+ # platform.) But some external programs have a target platform, so this
+ # is what we are specifying here.
+ command = env.lookup_binary_entry(for_machine, name)
+ if command is None:
+ return NonExistingExternalProgram()
+ return cls.from_entry(name, command)
+
+ @staticmethod
+ @functools.lru_cache(maxsize=None)
+ def _windows_sanitize_path(path: str) -> str:
+ # Ensure that we use USERPROFILE even when inside MSYS, MSYS2, Cygwin, etc.
+ if 'USERPROFILE' not in os.environ:
+ return path
+ # The WindowsApps directory is a bit of a problem. It contains
+ # some zero-sized .exe files which have "reparse points", that
+ # might either launch an installed application, or might open
+ # a page in the Windows Store to download the application.
+ #
+ # To handle the case where the python interpreter we're
+ # running on came from the Windows Store, if we see the
+ # WindowsApps path in the search path, replace it with
+ # dirname(sys.executable).
+ appstore_dir = Path(os.environ['USERPROFILE']) / 'AppData' / 'Local' / 'Microsoft' / 'WindowsApps'
+ paths = []
+ for each in path.split(os.pathsep):
+ if Path(each) != appstore_dir:
+ paths.append(each)
+ elif 'WindowsApps' in sys.executable:
+ paths.append(os.path.dirname(sys.executable))
+ return os.pathsep.join(paths)
+
+ @staticmethod
+ def from_entry(name: str, command: T.Union[str, T.List[str]]) -> 'ExternalProgram':
+ if isinstance(command, list):
+ if len(command) == 1:
+ command = command[0]
+ # We cannot do any searching if the command is a list, and we don't
+ # need to search if the path is an absolute path.
+ if isinstance(command, list) or os.path.isabs(command):
+ if isinstance(command, str):
+ command = [command]
+ return ExternalProgram(name, command=command, silent=True)
+ assert isinstance(command, str)
+ # Search for the command using the specified string!
+ return ExternalProgram(command, silent=True)
+
+ @staticmethod
+ def _shebang_to_cmd(script: str) -> T.Optional[T.List[str]]:
+ """
+ Check if the file has a shebang and manually parse it to figure out
+ the interpreter to use. This is useful if the script is not executable
+ or if we're on Windows (which does not understand shebangs).
+ """
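+        # For example (an illustrative sketch, not part of the original source):
+        # a script whose first line is '#!/usr/bin/env python3' typically
+        # resolves below to mesonlib.python_command + [script].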
+ try:
+ with open(script, encoding='utf-8') as f:
+ first_line = f.readline().strip()
+ if first_line.startswith('#!'):
+ # In a shebang, everything before the first space is assumed to
+ # be the command to run and everything after the first space is
+ # the single argument to pass to that command. So we must split
+ # exactly once.
+ commands = first_line[2:].split('#')[0].strip().split(maxsplit=1)
+ if mesonlib.is_windows():
+ # Windows does not have UNIX paths so remove them,
+ # but don't remove Windows paths
+ if commands[0].startswith('/'):
+ commands[0] = commands[0].split('/')[-1]
+ if len(commands) > 0 and commands[0] == 'env':
+ commands = commands[1:]
+ # Windows does not ship python3.exe, but we know the path to it
+ if len(commands) > 0 and commands[0] == 'python3':
+ commands = mesonlib.python_command + commands[1:]
+ elif mesonlib.is_haiku():
+ # Haiku does not have /usr, but a lot of scripts assume that
+ # /usr/bin/env always exists. Detect that case and run the
+ # script with the interpreter after it.
+ if commands[0] == '/usr/bin/env':
+ commands = commands[1:]
+ # We know what python3 is, we're running on it
+ if len(commands) > 0 and commands[0] == 'python3':
+ commands = mesonlib.python_command + commands[1:]
+ else:
+ # Replace python3 with the actual python3 that we are using
+ if commands[0] == '/usr/bin/env' and commands[1] == 'python3':
+ commands = mesonlib.python_command + commands[2:]
+ elif commands[0].split('/')[-1] == 'python3':
+ commands = mesonlib.python_command + commands[1:]
+ return commands + [script]
+ except Exception as e:
+ mlog.debug(str(e))
+ mlog.debug(f'Unusable script {script!r}')
+ return None
+
+ def _is_executable(self, path: str) -> bool:
+ suffix = os.path.splitext(path)[-1].lower()[1:]
+ execmask = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+ if mesonlib.is_windows():
+ if suffix in self.windows_exts:
+ return True
+ elif os.stat(path).st_mode & execmask:
+ return not os.path.isdir(path)
+ return False
+
+ def _search_dir(self, name: str, search_dir: T.Optional[str]) -> T.Optional[list]:
+ if search_dir is None:
+ return None
+ trial = os.path.join(search_dir, name)
+ if os.path.exists(trial):
+ if self._is_executable(trial):
+ return [trial]
+            # Now getting desperate. Maybe it is a script file that is
+            # a) not chmodded executable, or
+            # b) on Windows, where scripts can't be executed directly.
+ return self._shebang_to_cmd(trial)
+ else:
+ if mesonlib.is_windows():
+ for ext in self.windows_exts:
+ trial_ext = f'{trial}.{ext}'
+ if os.path.exists(trial_ext):
+ return [trial_ext]
+ return None
+
+ def _search_windows_special_cases(self, name: str, command: str) -> T.List[T.Optional[str]]:
+ '''
+ Lots of weird Windows quirks:
+ 1. PATH search for @name returns files with extensions from PATHEXT,
+ but only self.windows_exts are executable without an interpreter.
+ 2. @name might be an absolute path to an executable, but without the
+ extension. This works inside MinGW so people use it a lot.
+ 3. The script is specified without an extension, in which case we have
+ to manually search in PATH.
+ 4. More special-casing for the shebang inside the script.
+ '''
+ if command:
+ # On Windows, even if the PATH search returned a full path, we can't be
+ # sure that it can be run directly if it's not a native executable.
+ # For instance, interpreted scripts sometimes need to be run explicitly
+ # with an interpreter if the file association is not done properly.
+ name_ext = os.path.splitext(command)[1]
+ if name_ext[1:].lower() in self.windows_exts:
+ # Good, it can be directly executed
+ return [command]
+ # Try to extract the interpreter from the shebang
+ commands = self._shebang_to_cmd(command)
+ if commands:
+ return commands
+ return [None]
+ # Maybe the name is an absolute path to a native Windows
+ # executable, but without the extension. This is technically wrong,
+ # but many people do it because it works in the MinGW shell.
+ if os.path.isabs(name):
+ for ext in self.windows_exts:
+ command = f'{name}.{ext}'
+ if os.path.exists(command):
+ return [command]
+ # On Windows, interpreted scripts must have an extension otherwise they
+ # cannot be found by a standard PATH search. So we do a custom search
+ # where we manually search for a script with a shebang in PATH.
+ search_dirs = self._windows_sanitize_path(os.environ.get('PATH', '')).split(';')
+ for search_dir in search_dirs:
+ commands = self._search_dir(name, search_dir)
+ if commands:
+ return commands
+ return [None]
+
+ def _search(self, name: str, search_dir: T.Optional[str]) -> T.List[T.Optional[str]]:
+ '''
+ Search in the specified dir for the specified executable by name
+ and if not found search in PATH
+ '''
+ commands = self._search_dir(name, search_dir)
+ if commands:
+ return commands
+ # If there is a directory component, do not look in PATH
+ if os.path.dirname(name) and not os.path.isabs(name):
+ return [None]
+ # Do a standard search in PATH
+ path = os.environ.get('PATH', None)
+ if mesonlib.is_windows() and path:
+ path = self._windows_sanitize_path(path)
+ command = shutil.which(name, path=path)
+ if mesonlib.is_windows():
+ return self._search_windows_special_cases(name, command)
+ # On UNIX-like platforms, shutil.which() is enough to find
+ # all executables whether in PATH or with an absolute path
+ return [command]
+
+ def found(self) -> bool:
+ return self.command[0] is not None
+
+ def get_command(self) -> T.List[str]:
+ return self.command[:]
+
+ def get_path(self) -> T.Optional[str]:
+ return self.path
+
+ def get_name(self) -> str:
+ return self.name
+
+
+class NonExistingExternalProgram(ExternalProgram): # lgtm [py/missing-call-to-init]
+ "A program that will never exist"
+
+ def __init__(self, name: str = 'nonexistingprogram') -> None:
+ self.name = name
+ self.command = [None]
+ self.path = None
+
+ def __repr__(self) -> str:
+ r = '<{} {!r} -> {!r}>'
+ return r.format(self.__class__.__name__, self.name, self.command)
+
+ def found(self) -> bool:
+ return False
+
+
+class EmptyExternalProgram(ExternalProgram): # lgtm [py/missing-call-to-init]
+ '''
+ A program object that returns an empty list of commands. Used for cases
+ such as a cross file exe_wrapper to represent that it's not required.
+ '''
+
+ def __init__(self) -> None:
+ self.name = None
+ self.command = []
+ self.path = None
+
+ def __repr__(self) -> str:
+ r = '<{} {!r} -> {!r}>'
+ return r.format(self.__class__.__name__, self.name, self.command)
+
+ def found(self) -> bool:
+ return True
+
+
+class OverrideProgram(ExternalProgram):
+
+ """A script overriding a program."""
+
+
+def find_external_program(env: 'Environment', for_machine: MachineChoice, name: str,
+ display_name: str, default_names: T.List[str],
+ allow_default_for_cross: bool = True) -> T.Generator['ExternalProgram', None, None]:
+    """Find an external program, checking the cross file plus any default options."""
+ # Lookup in cross or machine file.
+ potential_cmd = env.lookup_binary_entry(for_machine, name)
+ if potential_cmd is not None:
+ mlog.debug(f'{display_name} binary for {for_machine} specified from cross file, native file, '
+ f'or env var as {potential_cmd}')
+ yield ExternalProgram.from_entry(name, potential_cmd)
+ # We never fallback if the user-specified option is no good, so
+ # stop returning options.
+ return
+ mlog.debug(f'{display_name} binary missing from cross or native file, or env var undefined.')
+ # Fallback on hard-coded defaults, if a default binary is allowed for use
+ # with cross targets, or if this is not a cross target
+ if allow_default_for_cross or not (for_machine is MachineChoice.HOST and env.is_cross_build(for_machine)):
+ for potential_path in default_names:
+ mlog.debug(f'Trying a default {display_name} fallback at', potential_path)
+ yield ExternalProgram(potential_path, silent=True)
+ else:
+ mlog.debug('Default target is not allowed for cross use')
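+
+# A minimal usage sketch (illustrative only, not part of the original source):
+#
+#   for prog in find_external_program(env, MachineChoice.HOST, 'pkgconfig',
+#                                     'pkg-config', ['pkg-config']):
+#       if prog.found():
+#           break
+#
+# The generator yields the cross/native-file entry first (if any) and then the
+# hard-coded default names, so callers stop at the first program that exists.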
diff --git a/meson/mesonbuild/rewriter.py b/meson/mesonbuild/rewriter.py
new file mode 100644
index 000000000..3f661a4b6
--- /dev/null
+++ b/meson/mesonbuild/rewriter.py
@@ -0,0 +1,970 @@
+#!/usr/bin/env python3
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+
+# This tool is used to manipulate an existing Meson build definition.
+#
+# - add a file to a target
+# - remove files from a target
+# - move targets
+# - reindent?
+
+from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstPrinter
+from mesonbuild.mesonlib import MesonException
+from . import mlog, environment
+from functools import wraps
+from .mparser import Token, ArrayNode, ArgumentNode, AssignmentNode, BaseNode, BooleanNode, ElementaryNode, IdNode, FunctionNode, StringNode
+import json, os, re, sys
+import typing as T
+
+class RewriterException(MesonException):
+ pass
+
+def add_arguments(parser, formatter=None):
+ parser.add_argument('-s', '--sourcedir', type=str, default='.', metavar='SRCDIR', help='Path to source directory.')
+ parser.add_argument('-V', '--verbose', action='store_true', default=False, help='Enable verbose output')
+ parser.add_argument('-S', '--skip-errors', dest='skip', action='store_true', default=False, help='Skip errors instead of aborting')
+ subparsers = parser.add_subparsers(dest='type', title='Rewriter commands', description='Rewrite command to execute')
+
+ # Target
+ tgt_parser = subparsers.add_parser('target', help='Modify a target', formatter_class=formatter)
+ tgt_parser.add_argument('-s', '--subdir', default='', dest='subdir', help='Subdirectory of the new target (only for the "add_target" action)')
+ tgt_parser.add_argument('--type', dest='tgt_type', choices=rewriter_keys['target']['target_type'][2], default='executable',
+ help='Type of the target to add (only for the "add_target" action)')
+ tgt_parser.add_argument('target', help='Name or ID of the target')
+ tgt_parser.add_argument('operation', choices=['add', 'rm', 'add_target', 'rm_target', 'info'],
+ help='Action to execute')
+ tgt_parser.add_argument('sources', nargs='*', help='Sources to add/remove')
+
+ # KWARGS
+ kw_parser = subparsers.add_parser('kwargs', help='Modify keyword arguments', formatter_class=formatter)
+ kw_parser.add_argument('operation', choices=rewriter_keys['kwargs']['operation'][2],
+ help='Action to execute')
+ kw_parser.add_argument('function', choices=list(rewriter_func_kwargs.keys()),
+ help='Function type to modify')
+ kw_parser.add_argument('id', help='ID of the function to modify (can be anything for "project")')
+ kw_parser.add_argument('kwargs', nargs='*', help='Pairs of keyword and value')
+
+ # Default options
+ def_parser = subparsers.add_parser('default-options', help='Modify the project default options', formatter_class=formatter)
+ def_parser.add_argument('operation', choices=rewriter_keys['default_options']['operation'][2],
+ help='Action to execute')
+ def_parser.add_argument('options', nargs='*', help='Key, value pairs of configuration option')
+
+ # JSON file/command
+ cmd_parser = subparsers.add_parser('command', help='Execute a JSON array of commands', formatter_class=formatter)
+ cmd_parser.add_argument('json', help='JSON string or file to execute')
+
+class RequiredKeys:
+ def __init__(self, keys):
+ self.keys = keys
+
+ def __call__(self, f):
+ @wraps(f)
+ def wrapped(*wrapped_args, **wrapped_kwargs):
+ assert(len(wrapped_args) >= 2)
+ cmd = wrapped_args[1]
+ for key, val in self.keys.items():
+                typ = val[0] # The type of the value
+                default = val[1] # The default value -- None means the key is required
+                choices = val[2] # Valid choices -- None allows any value
+ if key not in cmd:
+ if default is not None:
+ cmd[key] = default
+ else:
+ raise RewriterException('Key "{}" is missing in object for {}'
+ .format(key, f.__name__))
+ if not isinstance(cmd[key], typ):
+ raise RewriterException('Invalid type of "{}". Required is {} but provided was {}'
+ .format(key, typ.__name__, type(cmd[key]).__name__))
+ if choices is not None:
+ assert(isinstance(choices, list))
+ if cmd[key] not in choices:
+ raise RewriterException('Invalid value of "{}": Possible values are {} but provided was "{}"'
+ .format(key, choices, cmd[key]))
+ return f(*wrapped_args, **wrapped_kwargs)
+
+ return wrapped
+
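+# RequiredKeys validates the command dict against {key: (type, default, choices)}
+# specs. A hypothetical usage sketch (not part of the original source):
+#
+#   @RequiredKeys({'operation': (str, None, ['set', 'delete'])})
+#   def process(self, cmd): ...
+#
+# A missing key with a non-None default is filled in; otherwise an exception is
+# raised, as it is for a wrong type or a value outside the allowed choices.
+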
+class MTypeBase:
+ def __init__(self, node: T.Optional[BaseNode] = None):
+ if node is None:
+ self.node = self._new_node() # lgtm [py/init-calls-subclass] (node creation does not depend on base class state)
+ else:
+ self.node = node
+ self.node_type = None
+ for i in self.supported_nodes(): # lgtm [py/init-calls-subclass] (listing nodes does not depend on base class state)
+ if isinstance(self.node, i):
+ self.node_type = i
+
+ def _new_node(self):
+ # Overwrite in derived class
+ raise RewriterException('Internal error: _new_node of MTypeBase was called')
+
+ def can_modify(self):
+ return self.node_type is not None
+
+ def get_node(self):
+ return self.node
+
+ def supported_nodes(self):
+ # Overwrite in derived class
+ return []
+
+ def set_value(self, value):
+ # Overwrite in derived class
+ mlog.warning('Cannot set the value of type', mlog.bold(type(self).__name__), '--> skipping')
+
+ def add_value(self, value):
+ # Overwrite in derived class
+ mlog.warning('Cannot add a value of type', mlog.bold(type(self).__name__), '--> skipping')
+
+ def remove_value(self, value):
+ # Overwrite in derived class
+ mlog.warning('Cannot remove a value of type', mlog.bold(type(self).__name__), '--> skipping')
+
+ def remove_regex(self, value):
+ # Overwrite in derived class
+ mlog.warning('Cannot remove a regex in type', mlog.bold(type(self).__name__), '--> skipping')
+
+class MTypeStr(MTypeBase):
+ def __init__(self, node: T.Optional[BaseNode] = None):
+ super().__init__(node)
+
+ def _new_node(self):
+ return StringNode(Token('', '', 0, 0, 0, None, ''))
+
+ def supported_nodes(self):
+ return [StringNode]
+
+ def set_value(self, value):
+ self.node.value = str(value)
+
+class MTypeBool(MTypeBase):
+ def __init__(self, node: T.Optional[BaseNode] = None):
+ super().__init__(node)
+
+ def _new_node(self):
+ return BooleanNode(Token('', '', 0, 0, 0, None, False))
+
+ def supported_nodes(self):
+ return [BooleanNode]
+
+ def set_value(self, value):
+ self.node.value = bool(value)
+
+class MTypeID(MTypeBase):
+ def __init__(self, node: T.Optional[BaseNode] = None):
+ super().__init__(node)
+
+ def _new_node(self):
+ return IdNode(Token('', '', 0, 0, 0, None, ''))
+
+ def supported_nodes(self):
+ return [IdNode]
+
+ def set_value(self, value):
+ self.node.value = str(value)
+
+class MTypeList(MTypeBase):
+ def __init__(self, node: T.Optional[BaseNode] = None):
+ super().__init__(node)
+
+ def _new_node(self):
+ return ArrayNode(ArgumentNode(Token('', '', 0, 0, 0, None, '')), 0, 0, 0, 0)
+
+ def _new_element_node(self, value):
+ # Overwrite in derived class
+ raise RewriterException('Internal error: _new_element_node of MTypeList was called')
+
+ def _ensure_array_node(self):
+ if not isinstance(self.node, ArrayNode):
+ tmp = self.node
+ self.node = self._new_node()
+ self.node.args.arguments += [tmp]
+
+ def _check_is_equal(self, node, value) -> bool:
+ # Overwrite in derived class
+ return False
+
+ def _check_regex_matches(self, node, regex: str) -> bool:
+ # Overwrite in derived class
+ return False
+
+ def get_node(self):
+ if isinstance(self.node, ArrayNode):
+ if len(self.node.args.arguments) == 1:
+ return self.node.args.arguments[0]
+ return self.node
+
+ def supported_element_nodes(self):
+ # Overwrite in derived class
+ return []
+
+ def supported_nodes(self):
+ return [ArrayNode] + self.supported_element_nodes()
+
+ def set_value(self, value):
+ if not isinstance(value, list):
+ value = [value]
+ self._ensure_array_node()
+ self.node.args.arguments = [] # Remove all current nodes
+ for i in value:
+ self.node.args.arguments += [self._new_element_node(i)]
+
+ def add_value(self, value):
+ if not isinstance(value, list):
+ value = [value]
+ self._ensure_array_node()
+ for i in value:
+ self.node.args.arguments += [self._new_element_node(i)]
+
+    def _remove_helper(self, value, equal_func):
+        def check_remove_node(node):
+            for j in value:
+                if equal_func(node, j):
+                    return True
+            return False
+
+        if not isinstance(value, list):
+            value = [value]
+        self._ensure_array_node()
+        kept_nodes = []
+        for i in self.node.args.arguments:
+            if not check_remove_node(i):
+                kept_nodes += [i]
+        self.node.args.arguments = kept_nodes
+
+ def remove_value(self, value):
+ self._remove_helper(value, self._check_is_equal)
+
+ def remove_regex(self, regex: str):
+ self._remove_helper(regex, self._check_regex_matches)
+
+class MTypeStrList(MTypeList):
+ def __init__(self, node: T.Optional[BaseNode] = None):
+ super().__init__(node)
+
+ def _new_element_node(self, value):
+ return StringNode(Token('', '', 0, 0, 0, None, str(value)))
+
+ def _check_is_equal(self, node, value) -> bool:
+ if isinstance(node, StringNode):
+ return node.value == value
+ return False
+
+ def _check_regex_matches(self, node, regex: str) -> bool:
+ if isinstance(node, StringNode):
+ return re.match(regex, node.value) is not None
+ return False
+
+ def supported_element_nodes(self):
+ return [StringNode]
+
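+# Usage sketch (illustrative, not part of the original source): MTypeStrList
+# wraps a meson array of strings; given its node, remove_regex('foo.*') drops
+# every StringNode whose value matches, and add_value(['a.c']) appends one.
+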
+class MTypeIDList(MTypeList):
+ def __init__(self, node: T.Optional[BaseNode] = None):
+ super().__init__(node)
+
+ def _new_element_node(self, value):
+ return IdNode(Token('', '', 0, 0, 0, None, str(value)))
+
+ def _check_is_equal(self, node, value) -> bool:
+ if isinstance(node, IdNode):
+ return node.value == value
+ return False
+
+ def _check_regex_matches(self, node, regex: str) -> bool:
+        if isinstance(node, IdNode):
+ return re.match(regex, node.value) is not None
+ return False
+
+ def supported_element_nodes(self):
+ return [IdNode]
+
+rewriter_keys = {
+ 'default_options': {
+ 'operation': (str, None, ['set', 'delete']),
+ 'options': (dict, {}, None)
+ },
+ 'kwargs': {
+ 'function': (str, None, None),
+ 'id': (str, None, None),
+ 'operation': (str, None, ['set', 'delete', 'add', 'remove', 'remove_regex', 'info']),
+ 'kwargs': (dict, {}, None)
+ },
+ 'target': {
+ 'target': (str, None, None),
+ 'operation': (str, None, ['src_add', 'src_rm', 'target_rm', 'target_add', 'info']),
+ 'sources': (list, [], None),
+ 'subdir': (str, '', None),
+ 'target_type': (str, 'executable', ['both_libraries', 'executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library']),
+ }
+}
+
+rewriter_func_kwargs = {
+ 'dependency': {
+ 'language': MTypeStr,
+ 'method': MTypeStr,
+ 'native': MTypeBool,
+ 'not_found_message': MTypeStr,
+ 'required': MTypeBool,
+ 'static': MTypeBool,
+ 'version': MTypeStrList,
+ 'modules': MTypeStrList
+ },
+ 'target': {
+ 'build_by_default': MTypeBool,
+ 'build_rpath': MTypeStr,
+ 'dependencies': MTypeIDList,
+ 'gui_app': MTypeBool,
+ 'link_with': MTypeIDList,
+ 'export_dynamic': MTypeBool,
+ 'implib': MTypeBool,
+ 'install': MTypeBool,
+ 'install_dir': MTypeStr,
+ 'install_rpath': MTypeStr,
+ 'pie': MTypeBool
+ },
+ 'project': {
+ 'default_options': MTypeStrList,
+ 'meson_version': MTypeStr,
+ 'license': MTypeStrList,
+ 'subproject_dir': MTypeStr,
+ 'version': MTypeStr
+ }
+}
+
+class Rewriter:
+ def __init__(self, sourcedir: str, generator: str = 'ninja', skip_errors: bool = False):
+ self.sourcedir = sourcedir
+        self.interpreter = IntrospectionInterpreter(sourcedir, '', generator, visitors=[AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
+ self.skip_errors = skip_errors
+ self.modified_nodes = []
+ self.to_remove_nodes = []
+ self.to_add_nodes = []
+ self.functions = {
+ 'default_options': self.process_default_options,
+ 'kwargs': self.process_kwargs,
+ 'target': self.process_target,
+ }
+ self.info_dump = None
+
+ def analyze_meson(self):
+ mlog.log('Analyzing meson file:', mlog.bold(os.path.join(self.sourcedir, environment.build_filename)))
+ self.interpreter.analyze()
+ mlog.log(' -- Project:', mlog.bold(self.interpreter.project_data['descriptive_name']))
+ mlog.log(' -- Version:', mlog.cyan(self.interpreter.project_data['version']))
+
+ def add_info(self, cmd_type: str, cmd_id: str, data: dict):
+ if self.info_dump is None:
+ self.info_dump = {}
+ if cmd_type not in self.info_dump:
+ self.info_dump[cmd_type] = {}
+ self.info_dump[cmd_type][cmd_id] = data
+
+ def print_info(self):
+ if self.info_dump is None:
+ return
+ sys.stderr.write(json.dumps(self.info_dump, indent=2))
+
+ def on_error(self):
+ if self.skip_errors:
+ return mlog.cyan('-->'), mlog.yellow('skipping')
+ return mlog.cyan('-->'), mlog.red('aborting')
+
+ def handle_error(self):
+ if self.skip_errors:
+ return None
+ raise MesonException('Rewriting the meson.build failed')
+
+ def find_target(self, target: str):
+ def check_list(name: str) -> T.List[BaseNode]:
+ result = []
+ for i in self.interpreter.targets:
+ if name == i['name'] or name == i['id']:
+ result += [i]
+ return result
+
+ targets = check_list(target)
+ if targets:
+ if len(targets) == 1:
+ return targets[0]
+ else:
+ mlog.error('There are multiple targets matching', mlog.bold(target))
+ for i in targets:
+ mlog.error(' -- Target name', mlog.bold(i['name']), 'with ID', mlog.bold(i['id']))
+ mlog.error('Please try again with the unique ID of the target', *self.on_error())
+ self.handle_error()
+ return None
+
+ # Check the assignments
+ tgt = None
+ if target in self.interpreter.assignments:
+ node = self.interpreter.assignments[target]
+ if isinstance(node, FunctionNode):
+ if node.func_name in ['executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries']:
+ tgt = self.interpreter.assign_vals[target]
+
+ return tgt
+
+ def find_dependency(self, dependency: str):
+ def check_list(name: str):
+ for i in self.interpreter.dependencies:
+ if name == i['name']:
+ return i
+ return None
+
+ dep = check_list(dependency)
+ if dep is not None:
+ return dep
+
+ # Check the assignments
+ if dependency in self.interpreter.assignments:
+ node = self.interpreter.assignments[dependency]
+ if isinstance(node, FunctionNode):
+ if node.func_name in ['dependency']:
+ name = self.interpreter.flatten_args(node.args)[0]
+ dep = check_list(name)
+
+ return dep
+
+ @RequiredKeys(rewriter_keys['default_options'])
+ def process_default_options(self, cmd):
+ # First, remove the old values
+ kwargs_cmd = {
+ 'function': 'project',
+ 'id': "/",
+ 'operation': 'remove_regex',
+ 'kwargs': {
+ 'default_options': [f'{x}=.*' for x in cmd['options'].keys()]
+ }
+ }
+ self.process_kwargs(kwargs_cmd)
+
+ # Then add the new values
+ if cmd['operation'] != 'set':
+ return
+
+ kwargs_cmd['operation'] = 'add'
+ kwargs_cmd['kwargs']['default_options'] = []
+
+ cdata = self.interpreter.coredata
+        options = {str(k): v for k, v in cdata.options.items()}
+
+ for key, val in sorted(cmd['options'].items()):
+ if key not in options:
+                mlog.error('Unknown option', mlog.bold(key), *self.on_error())
+ self.handle_error()
+ continue
+
+ try:
+ val = options[key].validate_value(val)
+ except MesonException as e:
+ mlog.error('Unable to set', mlog.bold(key), mlog.red(str(e)), *self.on_error())
+ self.handle_error()
+ continue
+
+ kwargs_cmd['kwargs']['default_options'] += [f'{key}={val}']
+
+ self.process_kwargs(kwargs_cmd)
+
+ @RequiredKeys(rewriter_keys['kwargs'])
+ def process_kwargs(self, cmd):
+ mlog.log('Processing function type', mlog.bold(cmd['function']), 'with id', mlog.cyan("'" + cmd['id'] + "'"))
+ if cmd['function'] not in rewriter_func_kwargs:
+ mlog.error('Unknown function type', cmd['function'], *self.on_error())
+ return self.handle_error()
+ kwargs_def = rewriter_func_kwargs[cmd['function']]
+
+ # Find the function node to modify
+ node = None
+ arg_node = None
+ if cmd['function'] == 'project':
+ # msys bash may expand '/' to a path. It will mangle '//' to '/'
+ # but in order to keep usage shell-agnostic, also allow `//` as
+ # the function ID such that it will work in both msys bash and
+ # other shells.
+            if cmd['id'] not in {'/', '//'}:
+ mlog.error('The ID for the function type project must be "/" or "//" not "' + cmd['id'] + '"', *self.on_error())
+ return self.handle_error()
+ node = self.interpreter.project_node
+ arg_node = node.args
+ elif cmd['function'] == 'target':
+ tmp = self.find_target(cmd['id'])
+ if tmp:
+ node = tmp['node']
+ arg_node = node.args
+ elif cmd['function'] == 'dependency':
+ tmp = self.find_dependency(cmd['id'])
+ if tmp:
+ node = tmp['node']
+ arg_node = node.args
+        if not node:
+            mlog.error('Unable to find the function node', *self.on_error())
+            return self.handle_error()
+ assert(isinstance(node, FunctionNode))
+ assert(isinstance(arg_node, ArgumentNode))
+ # Transform the key nodes to plain strings
+ arg_node.kwargs = {k.value: v for k, v in arg_node.kwargs.items()}
+
+ # Print kwargs info
+ if cmd['operation'] == 'info':
+ info_data = {}
+ for key, val in sorted(arg_node.kwargs.items()):
+ info_data[key] = None
+ if isinstance(val, ElementaryNode):
+ info_data[key] = val.value
+ elif isinstance(val, ArrayNode):
+ data_list = []
+ for i in val.args.arguments:
+ element = None
+ if isinstance(i, ElementaryNode):
+ element = i.value
+ data_list += [element]
+ info_data[key] = data_list
+
+ self.add_info('kwargs', '{}#{}'.format(cmd['function'], cmd['id']), info_data)
+ return # Nothing else to do
+
+ # Modify the kwargs
+ num_changed = 0
+ for key, val in sorted(cmd['kwargs'].items()):
+ if key not in kwargs_def:
+ mlog.error('Cannot modify unknown kwarg', mlog.bold(key), *self.on_error())
+ self.handle_error()
+ continue
+
+ # Remove the key from the kwargs
+ if cmd['operation'] == 'delete':
+ if key in arg_node.kwargs:
+ mlog.log(' -- Deleting', mlog.bold(key), 'from the kwargs')
+ del arg_node.kwargs[key]
+ num_changed += 1
+ else:
+ mlog.log(' -- Key', mlog.bold(key), 'is already deleted')
+ continue
+
+ if key not in arg_node.kwargs:
+ arg_node.kwargs[key] = None
+            modifier = kwargs_def[key](arg_node.kwargs[key])
+            if not modifier.can_modify():
+                mlog.log(' -- Skipping', mlog.bold(key), 'because it is too complex to modify')
+                continue
+
+ # Apply the operation
+ val_str = str(val)
+ if cmd['operation'] == 'set':
+ mlog.log(' -- Setting', mlog.bold(key), 'to', mlog.yellow(val_str))
+                modifier.set_value(val)
+ elif cmd['operation'] == 'add':
+ mlog.log(' -- Adding', mlog.yellow(val_str), 'to', mlog.bold(key))
+                modifier.add_value(val)
+ elif cmd['operation'] == 'remove':
+ mlog.log(' -- Removing', mlog.yellow(val_str), 'from', mlog.bold(key))
+                modifier.remove_value(val)
+ elif cmd['operation'] == 'remove_regex':
+ mlog.log(' -- Removing all values matching', mlog.yellow(val_str), 'from', mlog.bold(key))
+                modifier.remove_regex(val)
+
+ # Write back the result
+            arg_node.kwargs[key] = modifier.get_node()
+ num_changed += 1
+
+ # Convert the keys back to IdNode's
+ arg_node.kwargs = {IdNode(Token('', '', 0, 0, 0, None, k)): v for k, v in arg_node.kwargs.items()}
+ if num_changed > 0 and node not in self.modified_nodes:
+ self.modified_nodes += [node]
+
+    def find_assignment_node(self, node: BaseNode) -> T.Optional[AssignmentNode]:
+ if node.ast_id and node.ast_id in self.interpreter.reverse_assignment:
+ return self.interpreter.reverse_assignment[node.ast_id]
+ return None
+
+ @RequiredKeys(rewriter_keys['target'])
+ def process_target(self, cmd):
+ mlog.log('Processing target', mlog.bold(cmd['target']), 'operation', mlog.cyan(cmd['operation']))
+ target = self.find_target(cmd['target'])
+ if target is None and cmd['operation'] != 'target_add':
+ mlog.error('Unknown target', mlog.bold(cmd['target']), *self.on_error())
+ return self.handle_error()
+
+ # Make source paths relative to the current subdir
+ def rel_source(src: str) -> str:
+ subdir = os.path.abspath(os.path.join(self.sourcedir, target['subdir']))
+ if os.path.isabs(src):
+ return os.path.relpath(src, subdir)
+ elif not os.path.exists(src):
+ return src # Trust the user when the source doesn't exist
+ # Make sure that the path is relative to the subdir
+ return os.path.relpath(os.path.abspath(src), subdir)
+
+ if target is not None:
+ cmd['sources'] = [rel_source(x) for x in cmd['sources']]
+
+ # Utility function to get a list of the sources from a node
+ def arg_list_from_node(n):
+ args = []
+ if isinstance(n, FunctionNode):
+ args = list(n.args.arguments)
+ if n.func_name in build_target_functions:
+ args.pop(0)
+ elif isinstance(n, ArrayNode):
+ args = n.args.arguments
+ elif isinstance(n, ArgumentNode):
+ args = n.arguments
+ return args
+
+ to_sort_nodes = []
+
+ if cmd['operation'] == 'src_add':
+ node = None
+ if target['sources']:
+ node = target['sources'][0]
+ else:
+ node = target['node']
+ assert(node is not None)
+
+ # Generate the current source list
+ src_list = []
+ for i in target['sources']:
+ for j in arg_list_from_node(i):
+ if isinstance(j, StringNode):
+ src_list += [j.value]
+
+ # Generate the new String nodes
+ to_append = []
+ for i in sorted(set(cmd['sources'])):
+ if i in src_list:
+ mlog.log(' -- Source', mlog.green(i), 'is already defined for the target --> skipping')
+ continue
+ mlog.log(' -- Adding source', mlog.green(i), 'at',
+ mlog.yellow(f'{node.filename}:{node.lineno}'))
+ token = Token('string', node.filename, 0, 0, 0, None, i)
+ to_append += [StringNode(token)]
+
+ # Append to the AST at the right place
+ arg_node = None
+ if isinstance(node, (FunctionNode, ArrayNode)):
+ arg_node = node.args
+ elif isinstance(node, ArgumentNode):
+ arg_node = node
+ assert(arg_node is not None)
+ arg_node.arguments += to_append
+
+ # Mark the node as modified
+ if arg_node not in to_sort_nodes and not isinstance(node, FunctionNode):
+ to_sort_nodes += [arg_node]
+ if node not in self.modified_nodes:
+ self.modified_nodes += [node]
+
+ elif cmd['operation'] == 'src_rm':
+ # Helper to find the exact string node and its parent
+ def find_node(src):
+ for i in target['sources']:
+ for j in arg_list_from_node(i):
+ if isinstance(j, StringNode):
+ if j.value == src:
+ return i, j
+ return None, None
+
+ for i in cmd['sources']:
+ # Try to find the node with the source string
+ root, string_node = find_node(i)
+ if root is None:
+ mlog.warning(' -- Unable to find source', mlog.green(i), 'in the target')
+ continue
+
+ # Remove the found string node from the argument list
+ arg_node = None
+ if isinstance(root, (FunctionNode, ArrayNode)):
+ arg_node = root.args
+ elif isinstance(root, ArgumentNode):
+ arg_node = root
+ assert(arg_node is not None)
+ mlog.log(' -- Removing source', mlog.green(i), 'from',
+ mlog.yellow(f'{string_node.filename}:{string_node.lineno}'))
+ arg_node.arguments.remove(string_node)
+
+ # Mark the node as modified
+ if arg_node not in to_sort_nodes and not isinstance(root, FunctionNode):
+ to_sort_nodes += [arg_node]
+ if root not in self.modified_nodes:
+ self.modified_nodes += [root]
+
+ elif cmd['operation'] == 'target_add':
+ if target is not None:
+ mlog.error('Can not add target', mlog.bold(cmd['target']), 'because it already exists', *self.on_error())
+ return self.handle_error()
+
+ id_base = re.sub(r'[- ]', '_', cmd['target'])
+            target_id = id_base + ('_exe' if cmd['target_type'] == 'executable' else '_lib')
+ source_id = id_base + '_sources'
+ filename = os.path.join(cmd['subdir'], environment.build_filename)
+
+ # Build src list
+ src_arg_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, ''))
+ src_arr_node = ArrayNode(src_arg_node, 0, 0, 0, 0)
+ src_far_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, ''))
+ src_fun_node = FunctionNode(filename, 0, 0, 0, 0, 'files', src_far_node)
+ src_ass_node = AssignmentNode(filename, 0, 0, source_id, src_fun_node)
+ src_arg_node.arguments = [StringNode(Token('string', filename, 0, 0, 0, None, x)) for x in cmd['sources']]
+ src_far_node.arguments = [src_arr_node]
+
+ # Build target
+ tgt_arg_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, ''))
+ tgt_fun_node = FunctionNode(filename, 0, 0, 0, 0, cmd['target_type'], tgt_arg_node)
+ tgt_ass_node = AssignmentNode(filename, 0, 0, target_id, tgt_fun_node)
+ tgt_arg_node.arguments = [
+ StringNode(Token('string', filename, 0, 0, 0, None, cmd['target'])),
+ IdNode(Token('string', filename, 0, 0, 0, None, source_id))
+ ]
+
+ src_ass_node.accept(AstIndentationGenerator())
+ tgt_ass_node.accept(AstIndentationGenerator())
+ self.to_add_nodes += [src_ass_node, tgt_ass_node]
+
+ elif cmd['operation'] == 'target_rm':
+ to_remove = self.find_assignment_node(target['node'])
+ if to_remove is None:
+ to_remove = target['node']
+ self.to_remove_nodes += [to_remove]
+ mlog.log(' -- Removing target', mlog.green(cmd['target']), 'at',
+ mlog.yellow(f'{to_remove.filename}:{to_remove.lineno}'))
+
+ elif cmd['operation'] == 'info':
+            # List all sources in the target
+ src_list = []
+ for i in target['sources']:
+ for j in arg_list_from_node(i):
+ if isinstance(j, StringNode):
+ src_list += [j.value]
+ test_data = {
+ 'name': target['name'],
+ 'sources': src_list
+ }
+ self.add_info('target', target['id'], test_data)
+
+ # Sort files
+ for i in to_sort_nodes:
+ convert = lambda text: int(text) if text.isdigit() else text.lower()
+ alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
+ path_sorter = lambda key: ([(key.count('/') <= idx, alphanum_key(x)) for idx, x in enumerate(key.split('/'))])
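+            # Sorting sketch (illustrative): with this key, 'file2.c' orders
+            # before 'file10.c' because digit runs compare numerically, and
+            # 'sub/x.c' orders before 'top.c' because directory components
+            # compare as False < True at each path depth.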
+
+ unknown = [x for x in i.arguments if not isinstance(x, StringNode)]
+ sources = [x for x in i.arguments if isinstance(x, StringNode)]
+ sources = sorted(sources, key=lambda x: path_sorter(x.value))
+ i.arguments = unknown + sources
+
+ def process(self, cmd):
+ if 'type' not in cmd:
+ raise RewriterException('Command has no key "type"')
+ if cmd['type'] not in self.functions:
+ raise RewriterException('Unknown command "{}". Supported commands are: {}'
+ .format(cmd['type'], list(self.functions.keys())))
+ self.functions[cmd['type']](cmd)
+
+ def apply_changes(self):
+ assert(all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'filename') for x in self.modified_nodes))
+ assert(all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'filename') for x in self.to_remove_nodes))
+ assert(all(isinstance(x, (ArrayNode, FunctionNode)) for x in self.modified_nodes))
+ assert(all(isinstance(x, (ArrayNode, AssignmentNode, FunctionNode)) for x in self.to_remove_nodes))
+ # Sort based on line and column in reversed order
+ work_nodes = [{'node': x, 'action': 'modify'} for x in self.modified_nodes]
+ work_nodes += [{'node': x, 'action': 'rm'} for x in self.to_remove_nodes]
+ work_nodes = list(sorted(work_nodes, key=lambda x: (x['node'].lineno, x['node'].colno), reverse=True))
+ work_nodes += [{'node': x, 'action': 'add'} for x in self.to_add_nodes]
+
+ # Generating the new replacement string
+ str_list = []
+ for i in work_nodes:
+ new_data = ''
+ if i['action'] == 'modify' or i['action'] == 'add':
+ printer = AstPrinter()
+ i['node'].accept(printer)
+ printer.post_process()
+ new_data = printer.result.strip()
+ data = {
+ 'file': i['node'].filename,
+ 'str': new_data,
+ 'node': i['node'],
+ 'action': i['action']
+ }
+ str_list += [data]
+
+ # Load build files
+ files = {}
+ for i in str_list:
+ if i['file'] in files:
+ continue
+ fpath = os.path.realpath(os.path.join(self.sourcedir, i['file']))
+ fdata = ''
+ # Create an empty file if it does not exist
+ if not os.path.exists(fpath):
+ with open(fpath, 'w', encoding='utf-8'):
+ pass
+ with open(fpath, encoding='utf-8') as fp:
+ fdata = fp.read()
+
+            # Compute the character offset of each line start
+ m_lines = fdata.splitlines(True)
+ offset = 0
+ line_offsets = []
+ for j in m_lines:
+ line_offsets += [offset]
+ offset += len(j)
+
+ files[i['file']] = {
+ 'path': fpath,
+ 'raw': fdata,
+ 'offsets': line_offsets
+ }
+
+ # Replace in source code
+ def remove_node(i):
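+            # Compute the absolute [start, end) character span of the node from
+            # the per-line offsets, then splice the replacement string (empty
+            # for removals) into the raw file contents.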
+ offsets = files[i['file']]['offsets']
+ raw = files[i['file']]['raw']
+ node = i['node']
+ line = node.lineno - 1
+ col = node.colno
+ start = offsets[line] + col
+ end = start
+ if isinstance(node, (ArrayNode, FunctionNode)):
+ end = offsets[node.end_lineno - 1] + node.end_colno
+
+ # Only removal is supported for assignments
+ elif isinstance(node, AssignmentNode) and i['action'] == 'rm':
+ if isinstance(node.value, (ArrayNode, FunctionNode)):
+ remove_node({'file': i['file'], 'str': '', 'node': node.value, 'action': 'rm'})
+ raw = files[i['file']]['raw']
+ while raw[end] != '=':
+ end += 1
+ end += 1 # Handle the '='
+ while raw[end] in [' ', '\n', '\t']:
+ end += 1
+
+ files[i['file']]['raw'] = raw[:start] + i['str'] + raw[end:]
+
+ for i in str_list:
+ if i['action'] in ['modify', 'rm']:
+ remove_node(i)
+ elif i['action'] in ['add']:
+ files[i['file']]['raw'] += i['str'] + '\n'
+
+ # Write the files back
+ for key, val in files.items():
+ mlog.log('Rewriting', mlog.yellow(key))
+ with open(val['path'], 'w', encoding='utf-8') as fp:
+ fp.write(val['raw'])
+
+target_operation_map = {
+ 'add': 'src_add',
+ 'rm': 'src_rm',
+ 'add_target': 'target_add',
+ 'rm_target': 'target_rm',
+ 'info': 'info',
+}
+
+def list_to_dict(in_list: T.List[str]) -> T.Dict[str, str]:
+ result = {}
+ it = iter(in_list)
+ try:
+ for i in it:
+ # calling next(it) is not a mistake, we're taking the next element from
+ # the iterator, avoiding the need to preprocess it into a sequence of
+ # key value pairs.
+ result[i] = next(it)
+ except StopIteration:
+ raise TypeError('in_list parameter of list_to_dict must have an even length.')
+ return result
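+
+# For example (illustrative): list_to_dict(['key1', 'val1', 'key2', 'val2'])
+# returns {'key1': 'val1', 'key2': 'val2'}; an odd-length list raises TypeError.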
+
+def generate_target(options) -> T.List[dict]:
+ return [{
+ 'type': 'target',
+ 'target': options.target,
+ 'operation': target_operation_map[options.operation],
+ 'sources': options.sources,
+ 'subdir': options.subdir,
+ 'target_type': options.tgt_type,
+ }]
+
+def generate_kwargs(options) -> T.List[dict]:
+ return [{
+ 'type': 'kwargs',
+ 'function': options.function,
+ 'id': options.id,
+ 'operation': options.operation,
+ 'kwargs': list_to_dict(options.kwargs),
+ }]
+
+def generate_def_opts(options) -> T.List[dict]:
+ return [{
+ 'type': 'default_options',
+ 'operation': options.operation,
+ 'options': list_to_dict(options.options),
+ }]
+
+def generate_cmd(options) -> T.List[dict]:
+ if os.path.exists(options.json):
+ with open(options.json, encoding='utf-8') as fp:
+ return json.load(fp)
+ else:
+ return json.loads(options.json)
+
+# Map options.type to the actual type name
+cli_type_map = {
+ 'target': generate_target,
+ 'tgt': generate_target,
+ 'kwargs': generate_kwargs,
+ 'default-options': generate_def_opts,
+ 'def': generate_def_opts,
+ 'command': generate_cmd,
+ 'cmd': generate_cmd,
+}
+
+def run(options):
+ if not options.verbose:
+ mlog.set_quiet()
+
+ try:
+ rewriter = Rewriter(options.sourcedir, skip_errors=options.skip)
+ rewriter.analyze_meson()
+
+ if options.type is None:
+ mlog.error('No command specified')
+ return 1
+
+ commands = cli_type_map[options.type](options)
+
+ if not isinstance(commands, list):
+ raise TypeError('Command is not a list')
+
+ for i in commands:
+            if not isinstance(i, dict):
+                raise TypeError('Command is not an object')
+ rewriter.process(i)
+
+ rewriter.apply_changes()
+ rewriter.print_info()
+ return 0
+ finally:
+ mlog.set_verbose()
diff --git a/meson/mesonbuild/scripts/__init__.py b/meson/mesonbuild/scripts/__init__.py
new file mode 100644
index 000000000..2edbe8899
--- /dev/null
+++ b/meson/mesonbuild/scripts/__init__.py
@@ -0,0 +1,21 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# TODO: consider switching to pathlib for this
+def destdir_join(d1: str, d2: str) -> str:
+ # c:\destdir + c:\prefix must produce c:\destdir\prefix
+ if len(d1) > 1 and d1[1] == ':' \
+ and len(d2) > 1 and d2[1] == ':':
+ return d1 + d2[2:]
+ return d1 + d2
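+
+# For example (illustrative): destdir_join('c:\\destdir', 'c:\\prefix') returns
+# 'c:\\destdir\\prefix', while destdir_join('/destdir', '/prefix') is plain
+# concatenation, '/destdir/prefix'.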
diff --git a/meson/mesonbuild/scripts/clangformat.py b/meson/mesonbuild/scripts/clangformat.py
new file mode 100644
index 000000000..8e61b5591
--- /dev/null
+++ b/meson/mesonbuild/scripts/clangformat.py
@@ -0,0 +1,91 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import subprocess
+import itertools
+import fnmatch
+from pathlib import Path
+from concurrent.futures import ThreadPoolExecutor
+
+from ..environment import detect_clangformat
+from ..compilers import lang_suffixes
+import typing as T
+
+def parse_pattern_file(fname: Path) -> T.List[str]:
+ patterns = []
+ try:
+ with fname.open(encoding='utf-8') as f:
+ for line in f:
+ pattern = line.strip()
+ if pattern and not pattern.startswith('#'):
+ patterns.append(pattern)
+ except FileNotFoundError:
+ pass
+ return patterns
+
+def run_clang_format(exelist: T.List[str], fname: Path, check: bool) -> subprocess.CompletedProcess:
+ if check:
+ original = fname.read_bytes()
+ before = fname.stat().st_mtime
+ args = ['-style=file', '-i', str(fname)]
+ ret = subprocess.run(exelist + args)
+ after = fname.stat().st_mtime
+ if before != after:
+ print('File reformatted: ', fname)
+ if check:
+ # Restore the original if only checking.
+ fname.write_bytes(original)
+ ret.returncode = 1
+ return ret
+
+def clangformat(exelist: T.List[str], srcdir: Path, builddir: Path, check: bool) -> int:
+ patterns = parse_pattern_file(srcdir / '.clang-format-include')
+ if not patterns:
+ patterns = ['**/*']
+ globs = [srcdir.glob(p) for p in patterns]
+ patterns = parse_pattern_file(srcdir / '.clang-format-ignore')
+ ignore = [str(builddir / '*')]
+ ignore.extend([str(srcdir / p) for p in patterns])
+ suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp']))
+ suffixes.add('h')
+ suffixes = {f'.{s}' for s in suffixes}
+ futures = []
+ returncode = 0
+ with ThreadPoolExecutor() as e:
+ for f in itertools.chain(*globs):
+ strf = str(f)
+ if f.is_dir() or f.suffix not in suffixes or \
+ any(fnmatch.fnmatch(strf, i) for i in ignore):
+ continue
+ futures.append(e.submit(run_clang_format, exelist, f, check))
+ returncode = max([x.result().returncode for x in futures])
+ return returncode
+
+def run(args: T.List[str]) -> int:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--check', action='store_true')
+ parser.add_argument('sourcedir')
+ parser.add_argument('builddir')
+ options = parser.parse_args(args)
+
+ srcdir = Path(options.sourcedir)
+ builddir = Path(options.builddir)
+
+ exelist = detect_clangformat()
+ if not exelist:
+        print('Could not find clang-format')
+ return 1
+
+ return clangformat(exelist, srcdir, builddir, options.check)
diff --git a/meson/mesonbuild/scripts/clangtidy.py b/meson/mesonbuild/scripts/clangtidy.py
new file mode 100644
index 000000000..8d366c84d
--- /dev/null
+++ b/meson/mesonbuild/scripts/clangtidy.py
@@ -0,0 +1,57 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pathlib
+import subprocess
+import shutil
+import os
+import re
+from concurrent.futures import ThreadPoolExecutor
+import typing as T
+
+from ..compilers import lang_suffixes
+
+def manual_clangtidy(srcdir_name: str, builddir_name: str) -> int:
+ srcdir = pathlib.Path(srcdir_name)
+ suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp']))
+ suffixes.add('h')
+ futures = []
+ returncode = 0
+ with ThreadPoolExecutor() as e:
+ for f in (x for suff in suffixes for x in srcdir.glob('**/*.' + suff)):
+ if f.is_dir():
+ continue
+ strf = str(f)
+ if strf.startswith(builddir_name):
+ continue
+ futures.append(e.submit(subprocess.run, ['clang-tidy', '-p', builddir_name, strf]))
+ returncode = max([x.result().returncode for x in futures])
+ return returncode
+
+def clangtidy(srcdir_name: str, builddir_name: str) -> int:
+ run_clang_tidy = None
+ for rct in ('run-clang-tidy', 'run-clang-tidy.py'):
+ if shutil.which(rct):
+ run_clang_tidy = rct
+ break
+ if run_clang_tidy:
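+        # The last argument is a file-filter regex: the negative lookahead
+        # matches every path except those under the build directory.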
+        return subprocess.run([run_clang_tidy, '-p', builddir_name, '^(?!' + re.escape(builddir_name + os.path.sep) + ').*$']).returncode
+ else:
+ print('Could not find run-clang-tidy, running checks manually.')
+ return manual_clangtidy(srcdir_name, builddir_name)
+
+def run(args: T.List[str]) -> int:
+ srcdir_name = args[0]
+ builddir_name = args[1]
+ return clangtidy(srcdir_name, builddir_name)
diff --git a/meson/mesonbuild/scripts/cleantrees.py b/meson/mesonbuild/scripts/cleantrees.py
new file mode 100644
index 000000000..1a387538e
--- /dev/null
+++ b/meson/mesonbuild/scripts/cleantrees.py
@@ -0,0 +1,44 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import shutil
+import pickle
+import typing as T
+
+def rmtrees(build_dir: str, trees: T.List[str]) -> None:
+ for t in trees:
+ # Never delete trees outside of the builddir
+ if os.path.isabs(t):
+ print(f'Cannot delete dir with absolute path {t!r}')
+ continue
+ bt = os.path.join(build_dir, t)
+ # Skip if it doesn't exist, or if it is not a directory
+ if os.path.isdir(bt):
+ shutil.rmtree(bt, ignore_errors=True)
+
+def run(args: T.List[str]) -> int:
+ if len(args) != 1:
+ print('Cleaner script for Meson. Do not run on your own please.')
+ print('cleantrees.py <data-file>')
+ return 1
+ with open(args[0], 'rb') as f:
+ data = pickle.load(f)
+ rmtrees(data.build_dir, data.trees)
+ # Never fail cleaning
+ return 0
+
+if __name__ == '__main__':
+ run(sys.argv[1:])
diff --git a/meson/mesonbuild/scripts/cmake_run_ctgt.py b/meson/mesonbuild/scripts/cmake_run_ctgt.py
new file mode 100755
index 000000000..dfb70d10f
--- /dev/null
+++ b/meson/mesonbuild/scripts/cmake_run_ctgt.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python3
+
+import argparse
+import subprocess
+import shutil
+import sys
+from pathlib import Path
+import typing as T
+
+def run(argsv: T.List[str]) -> int:
+ commands = [[]] # type: T.List[T.List[str]]
+ SEPARATOR = ';;;'
+
+ # Generate CMD parameters
+ parser = argparse.ArgumentParser(description='Wrapper for add_custom_command')
+    parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cd into')
+ parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files')
+ parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake')
+ parser.add_argument('commands', nargs=argparse.REMAINDER, help=f'A "{SEPARATOR}" separated list of commands')
+
+ # Parse
+ args = parser.parse_args(argsv)
+ directory = Path(args.directory)
+
+ dummy_target = None
+ if len(args.outputs) == 1 and len(args.original_outputs) == 0:
+ dummy_target = Path(args.outputs[0])
+ elif len(args.outputs) != len(args.original_outputs):
+ print('Length of output list and original output list differ')
+ return 1
+
+ for i in args.commands:
+ if i == SEPARATOR:
+ commands += [[]]
+ continue
+
+        i = i.replace('"', '') # Remove leftover quotes
+ commands[-1] += [i]
+
+ # Execute
+ for i in commands:
+ # Skip empty lists
+ if not i:
+ continue
+
+ cmd = []
+ stdout = None
+ stderr = None
+ capture_file = ''
+
+ for j in i:
+ if j in ['>', '>>']:
+ stdout = subprocess.PIPE
+ continue
+ elif j in ['&>', '&>>']:
+ stdout = subprocess.PIPE
+ stderr = subprocess.STDOUT
+ continue
+
+ if stdout is not None or stderr is not None:
+ capture_file += j
+ else:
+ cmd += [j]
+
+ try:
+ directory.mkdir(parents=True, exist_ok=True)
+
+ res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=str(directory), check=True)
+ if capture_file:
+ out_file = directory / capture_file
+ out_file.write_bytes(res.stdout)
+ except subprocess.CalledProcessError:
+ return 1
+
+ if dummy_target:
+ dummy_target.touch()
+ return 0
+
+ # Copy outputs
+ zipped_outputs = zip([Path(x) for x in args.outputs], [Path(x) for x in args.original_outputs])
+ for expected, generated in zipped_outputs:
+ do_copy = False
+ if not expected.exists():
+ if not generated.exists():
+ print('Unable to find generated file. This can cause the build to fail:')
+ print(generated)
+ do_copy = False
+ else:
+ do_copy = True
+ elif generated.exists():
+ if generated.stat().st_mtime > expected.stat().st_mtime:
+ do_copy = True
+
+ if do_copy:
+ if expected.exists():
+ expected.unlink()
+ shutil.copyfile(str(generated), str(expected))
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/cmd_or_ps.ps1 b/meson/mesonbuild/scripts/cmd_or_ps.ps1
new file mode 100644
index 000000000..ccef8e84d
--- /dev/null
+++ b/meson/mesonbuild/scripts/cmd_or_ps.ps1
@@ -0,0 +1,22 @@
+# Copied from the GStreamer project
+# Author: Seungha Yang <seungha.yang@navercorp.com>
+
+$i=1
+$ppid=(gwmi win32_process -Filter "processid='$pid'").parentprocessid
+$pname=(Get-Process -id $ppid).Name
+While($true) {
+ if($pname -eq "cmd" -Or $pname -eq "powershell") {
+ Write-Host ("{0}.exe" -f $pname)
+ Break
+ }
+
+  # Ten iterations should be sufficient
+ if($i -gt 10) {
+ Break
+ }
+
+  # not found yet, check the grandparent
+ $ppid=(gwmi win32_process -Filter "processid='$ppid'").parentprocessid
+ $pname=(Get-Process -id $ppid).Name
+ $i++
+}
diff --git a/meson/mesonbuild/scripts/coverage.py b/meson/mesonbuild/scripts/coverage.py
new file mode 100644
index 000000000..5d552c301
--- /dev/null
+++ b/meson/mesonbuild/scripts/coverage.py
@@ -0,0 +1,173 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mesonbuild import environment, mesonlib
+
+import argparse, re, sys, os, subprocess, pathlib, stat
+import typing as T
+
+def coverage(outputs: T.List[str], source_root: str, subproject_root: str, build_root: str, log_dir: str, use_llvm_cov: bool) -> int:
+ outfiles = []
+ exitcode = 0
+
+ (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe) = environment.find_coverage_tools()
+
+ # gcovr >= 4.2 requires a different syntax for out of source builds
+ if gcovr_new_rootdir:
+ gcovr_base_cmd = [gcovr_exe, '-r', source_root, build_root]
+ else:
+ gcovr_base_cmd = [gcovr_exe, '-r', build_root]
+
+ if use_llvm_cov:
+ gcov_exe_args = ['--gcov-executable', llvm_cov_exe + ' gcov']
+ else:
+ gcov_exe_args = []
+
+ if not outputs or 'xml' in outputs:
+ if gcovr_exe:
+ subprocess.check_call(gcovr_base_cmd +
+ ['-x',
+ '-e', re.escape(subproject_root),
+ '-o', os.path.join(log_dir, 'coverage.xml')
+ ] + gcov_exe_args)
+ outfiles.append(('Xml', pathlib.Path(log_dir, 'coverage.xml')))
+ elif outputs:
+ print('gcovr >= 3.3 needed to generate Xml coverage report')
+ exitcode = 1
+
+ if not outputs or 'text' in outputs:
+ if gcovr_exe:
+ subprocess.check_call(gcovr_base_cmd +
+ ['-e', re.escape(subproject_root),
+ '-o', os.path.join(log_dir, 'coverage.txt')
+ ] + gcov_exe_args)
+ outfiles.append(('Text', pathlib.Path(log_dir, 'coverage.txt')))
+ elif outputs:
+ print('gcovr >= 3.3 needed to generate text coverage report')
+ exitcode = 1
+
+ if not outputs or 'html' in outputs:
+ if lcov_exe and genhtml_exe:
+ htmloutdir = os.path.join(log_dir, 'coveragereport')
+ covinfo = os.path.join(log_dir, 'coverage.info')
+ initial_tracefile = covinfo + '.initial'
+ run_tracefile = covinfo + '.run'
+ raw_tracefile = covinfo + '.raw'
+ if use_llvm_cov:
+ # Create a shim to allow using llvm-cov as a gcov tool.
+ if mesonlib.is_windows():
+ llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.bat')
+ with open(llvm_cov_shim_path, 'w', encoding='utf-8') as llvm_cov_bat:
+ llvm_cov_bat.write(f'@"{llvm_cov_exe}" gcov %*')
+ else:
+ llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.sh')
+ with open(llvm_cov_shim_path, 'w', encoding='utf-8') as llvm_cov_sh:
+                    llvm_cov_sh.write(f'#!/usr/bin/env sh\nexec "{llvm_cov_exe}" gcov "$@"')
+ os.chmod(llvm_cov_shim_path, os.stat(llvm_cov_shim_path).st_mode | stat.S_IEXEC)
+ gcov_tool_args = ['--gcov-tool', llvm_cov_shim_path]
+ else:
+ gcov_tool_args = []
+ subprocess.check_call([lcov_exe,
+ '--directory', build_root,
+ '--capture',
+ '--initial',
+ '--output-file',
+ initial_tracefile] +
+ gcov_tool_args)
+ subprocess.check_call([lcov_exe,
+ '--directory', build_root,
+ '--capture',
+ '--output-file', run_tracefile,
+ '--no-checksum',
+ '--rc', 'lcov_branch_coverage=1'] +
+ gcov_tool_args)
+ # Join initial and test results.
+ subprocess.check_call([lcov_exe,
+ '-a', initial_tracefile,
+ '-a', run_tracefile,
+ '--rc', 'lcov_branch_coverage=1',
+ '-o', raw_tracefile])
+ # Remove all directories outside the source_root from the covinfo
+ subprocess.check_call([lcov_exe,
+ '--extract', raw_tracefile,
+ os.path.join(source_root, '*'),
+ '--rc', 'lcov_branch_coverage=1',
+ '--output-file', covinfo])
+ # Remove all directories inside subproject dir
+ subprocess.check_call([lcov_exe,
+ '--remove', covinfo,
+ os.path.join(subproject_root, '*'),
+ '--rc', 'lcov_branch_coverage=1',
+ '--output-file', covinfo])
+ subprocess.check_call([genhtml_exe,
+ '--prefix', build_root,
+ '--prefix', source_root,
+ '--output-directory', htmloutdir,
+ '--title', 'Code coverage',
+ '--legend',
+ '--show-details',
+ '--branch-coverage',
+ covinfo])
+ outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
+ elif gcovr_exe:
+ htmloutdir = os.path.join(log_dir, 'coveragereport')
+ if not os.path.isdir(htmloutdir):
+ os.mkdir(htmloutdir)
+ subprocess.check_call(gcovr_base_cmd +
+ ['--html',
+ '--html-details',
+ '--print-summary',
+ '-e', re.escape(subproject_root),
+ '-o', os.path.join(htmloutdir, 'index.html'),
+ ])
+ outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
+ elif outputs:
+ print('lcov/genhtml or gcovr >= 3.3 needed to generate Html coverage report')
+ exitcode = 1
+
+ if not outputs and not outfiles:
+ print('Need gcovr or lcov/genhtml to generate any coverage reports')
+ exitcode = 1
+
+ if outfiles:
+ print('')
+ for (filetype, path) in outfiles:
+ print(filetype + ' coverage report can be found at', path.as_uri())
+
+ return exitcode
+
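+# Usage sketch (paths illustrative, not from a real build): run from a
+# Ninja build directory to generate just the XML report:
+#
+#     python coverage.py --xml /src/proj /src/proj/subprojects \
+#         /src/proj/build /src/proj/build/meson-logs
+#
+# This writes /src/proj/build/meson-logs/coverage.xml via gcovr.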
+def run(args: T.List[str]) -> int:
+ if not os.path.isfile('build.ninja'):
+ print('Coverage currently only works with the Ninja backend.')
+ return 1
+ parser = argparse.ArgumentParser(description='Generate coverage reports')
+ parser.add_argument('--text', dest='outputs', action='append_const',
+ const='text', help='generate Text report')
+ parser.add_argument('--xml', dest='outputs', action='append_const',
+ const='xml', help='generate Xml report')
+ parser.add_argument('--html', dest='outputs', action='append_const',
+ const='html', help='generate Html report')
+ parser.add_argument('--use_llvm_cov', action='store_true',
+ help='use llvm-cov')
+ parser.add_argument('source_root')
+ parser.add_argument('subproject_root')
+ parser.add_argument('build_root')
+ parser.add_argument('log_dir')
+ options = parser.parse_args(args)
+ return coverage(options.outputs, options.source_root,
+ options.subproject_root, options.build_root,
+ options.log_dir, options.use_llvm_cov)
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/delwithsuffix.py b/meson/mesonbuild/scripts/delwithsuffix.py
new file mode 100644
index 000000000..873db0d40
--- /dev/null
+++ b/meson/mesonbuild/scripts/delwithsuffix.py
@@ -0,0 +1,36 @@
+# Copyright 2013 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os, sys
+import typing as T
+
+def run(args: T.List[str]) -> int:
+ if len(args) != 2:
+ print('delwithsuffix.py <root of subdir to process> <suffix to delete>')
+ sys.exit(1)
+
+ topdir = args[0]
+ suffix = args[1]
+ if suffix[0] != '.':
+ suffix = '.' + suffix
+
+ for (root, _, files) in os.walk(topdir):
+ for f in files:
+ if f.endswith(suffix):
+ fullname = os.path.join(root, f)
+ os.unlink(fullname)
+ return 0
+
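+# Usage sketch (arguments illustrative):
+#
+#     python delwithsuffix.py builddir gcda
+#
+# removes every '*.gcda' file under builddir. The leading dot in the
+# suffix is optional and prepended when missing.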
+if __name__ == '__main__':
+ run(sys.argv[1:])
diff --git a/meson/mesonbuild/scripts/depfixer.py b/meson/mesonbuild/scripts/depfixer.py
new file mode 100644
index 000000000..52c7ba969
--- /dev/null
+++ b/meson/mesonbuild/scripts/depfixer.py
@@ -0,0 +1,509 @@
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import sys
+import os
+import stat
+import struct
+import shutil
+import subprocess
+import typing as T
+
+from ..mesonlib import OrderedSet
+
+SHT_STRTAB = 3
+DT_NEEDED = 1
+DT_RPATH = 15
+DT_RUNPATH = 29
+DT_STRTAB = 5
+DT_SONAME = 14
+DT_MIPS_RLD_MAP_REL = 1879048245
+
+# Global cache for tools
+INSTALL_NAME_TOOL = False
+
+class DataSizes:
+ def __init__(self, ptrsize: int, is_le: bool) -> None:
+ if is_le:
+ p = '<'
+ else:
+ p = '>'
+ self.Half = p + 'h'
+ self.HalfSize = 2
+ self.Word = p + 'I'
+ self.WordSize = 4
+ self.Sword = p + 'i'
+ self.SwordSize = 4
+ if ptrsize == 64:
+ self.Addr = p + 'Q'
+ self.AddrSize = 8
+ self.Off = p + 'Q'
+ self.OffSize = 8
+ self.XWord = p + 'Q'
+ self.XWordSize = 8
+ self.Sxword = p + 'q'
+ self.SxwordSize = 8
+ else:
+ self.Addr = p + 'I'
+ self.AddrSize = 4
+ self.Off = p + 'I'
+ self.OffSize = 4
+
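+# Example of the struct format strings DataSizes produces (little-endian,
+# 64-bit ELF):
+#
+#     ds = DataSizes(64, True)
+#     ds.Half == '<h'  (2 bytes)    ds.Word == '<I'   (4 bytes)
+#     ds.Addr == '<Q'  (8 bytes)    ds.XWord == '<Q'  (8 bytes)
+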
+class DynamicEntry(DataSizes):
+ def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
+ super().__init__(ptrsize, is_le)
+ self.ptrsize = ptrsize
+ if ptrsize == 64:
+ self.d_tag = struct.unpack(self.Sxword, ifile.read(self.SxwordSize))[0]
+ self.val = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+ else:
+ self.d_tag = struct.unpack(self.Sword, ifile.read(self.SwordSize))[0]
+ self.val = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+
+ def write(self, ofile: T.BinaryIO) -> None:
+ if self.ptrsize == 64:
+ ofile.write(struct.pack(self.Sxword, self.d_tag))
+ ofile.write(struct.pack(self.XWord, self.val))
+ else:
+ ofile.write(struct.pack(self.Sword, self.d_tag))
+ ofile.write(struct.pack(self.Word, self.val))
+
+class SectionHeader(DataSizes):
+ def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
+ super().__init__(ptrsize, is_le)
+ if ptrsize == 64:
+ is_64 = True
+ else:
+ is_64 = False
+# Elf64_Word
+ self.sh_name = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Word
+ self.sh_type = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Xword
+ if is_64:
+ self.sh_flags = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+ else:
+ self.sh_flags = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Addr
+ self.sh_addr = struct.unpack(self.Addr, ifile.read(self.AddrSize))[0]
+# Elf64_Off
+ self.sh_offset = struct.unpack(self.Off, ifile.read(self.OffSize))[0]
+# Elf64_Xword
+ if is_64:
+ self.sh_size = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+ else:
+ self.sh_size = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Word
+ self.sh_link = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Word
+ self.sh_info = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Xword
+ if is_64:
+ self.sh_addralign = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+ else:
+ self.sh_addralign = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Xword
+ if is_64:
+ self.sh_entsize = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+ else:
+ self.sh_entsize = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+
+class Elf(DataSizes):
+ def __init__(self, bfile: str, verbose: bool = True) -> None:
+ self.bfile = bfile
+ self.verbose = verbose
+ self.sections = [] # type: T.List[SectionHeader]
+ self.dynamic = [] # type: T.List[DynamicEntry]
+ self.open_bf(bfile)
+ try:
+ (self.ptrsize, self.is_le) = self.detect_elf_type()
+ super().__init__(self.ptrsize, self.is_le)
+ self.parse_header()
+ self.parse_sections()
+ self.parse_dynamic()
+ except (struct.error, RuntimeError):
+ self.close_bf()
+ raise
+
+ def open_bf(self, bfile: str) -> None:
+ self.bf = None
+ self.bf_perms = None
+ try:
+ self.bf = open(bfile, 'r+b')
+ except PermissionError as e:
+ self.bf_perms = stat.S_IMODE(os.lstat(bfile).st_mode)
+ os.chmod(bfile, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+ try:
+ self.bf = open(bfile, 'r+b')
+ except Exception:
+ os.chmod(bfile, self.bf_perms)
+ self.bf_perms = None
+ raise e
+
+ def close_bf(self) -> None:
+ if self.bf is not None:
+ if self.bf_perms is not None:
+ os.fchmod(self.bf.fileno(), self.bf_perms)
+ self.bf_perms = None
+ self.bf.close()
+ self.bf = None
+
+ def __enter__(self) -> 'Elf':
+ return self
+
+ def __del__(self) -> None:
+ self.close_bf()
+
+ def __exit__(self, exc_type: T.Any, exc_value: T.Any, traceback: T.Any) -> None:
+ self.close_bf()
+
+ def detect_elf_type(self) -> T.Tuple[int, bool]:
+ data = self.bf.read(6)
+ if data[1:4] != b'ELF':
+ # This script also gets called on non-ELF targets,
+ # so just ignore them.
+ if self.verbose:
+ print('File "%s" is not an ELF file.' % self.bfile)
+ sys.exit(0)
+ if data[4] == 1:
+ ptrsize = 32
+ elif data[4] == 2:
+ ptrsize = 64
+ else:
+ sys.exit('File "%s" has unknown ELF class.' % self.bfile)
+ if data[5] == 1:
+ is_le = True
+ elif data[5] == 2:
+ is_le = False
+ else:
+ sys.exit('File "%s" has unknown ELF endianness.' % self.bfile)
+ return ptrsize, is_le
+
+ def parse_header(self) -> None:
+ self.bf.seek(0)
+ self.e_ident = struct.unpack('16s', self.bf.read(16))[0]
+ self.e_type = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_machine = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_version = struct.unpack(self.Word, self.bf.read(self.WordSize))[0]
+ self.e_entry = struct.unpack(self.Addr, self.bf.read(self.AddrSize))[0]
+ self.e_phoff = struct.unpack(self.Off, self.bf.read(self.OffSize))[0]
+ self.e_shoff = struct.unpack(self.Off, self.bf.read(self.OffSize))[0]
+ self.e_flags = struct.unpack(self.Word, self.bf.read(self.WordSize))[0]
+ self.e_ehsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_phentsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_phnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_shentsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_shnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+ self.e_shstrndx = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+
+ def parse_sections(self) -> None:
+ self.bf.seek(self.e_shoff)
+ for _ in range(self.e_shnum):
+ self.sections.append(SectionHeader(self.bf, self.ptrsize, self.is_le))
+
+ def read_str(self) -> bytes:
+ arr = []
+ x = self.bf.read(1)
+ while x != b'\0':
+ arr.append(x)
+ x = self.bf.read(1)
+ if x == b'':
+ raise RuntimeError('Tried to read past the end of the file')
+ return b''.join(arr)
+
+ def find_section(self, target_name: bytes) -> T.Optional[SectionHeader]:
+ section_names = self.sections[self.e_shstrndx]
+ for i in self.sections:
+ self.bf.seek(section_names.sh_offset + i.sh_name)
+ name = self.read_str()
+ if name == target_name:
+ return i
+ return None
+
+ def parse_dynamic(self) -> None:
+ sec = self.find_section(b'.dynamic')
+ if sec is None:
+ return
+ self.bf.seek(sec.sh_offset)
+ while True:
+ e = DynamicEntry(self.bf, self.ptrsize, self.is_le)
+ self.dynamic.append(e)
+ if e.d_tag == 0:
+ break
+
+ def print_section_names(self) -> None:
+ section_names = self.sections[self.e_shstrndx]
+ for i in self.sections:
+ self.bf.seek(section_names.sh_offset + i.sh_name)
+ name = self.read_str()
+ print(name.decode())
+
+ def print_soname(self) -> None:
+ soname = None
+ strtab = None
+ for i in self.dynamic:
+ if i.d_tag == DT_SONAME:
+ soname = i
+ if i.d_tag == DT_STRTAB:
+ strtab = i
+ if soname is None or strtab is None:
+ print("This file does not have a soname")
+ return
+ self.bf.seek(strtab.val + soname.val)
+ print(self.read_str())
+
+ def get_entry_offset(self, entrynum: int) -> T.Optional[int]:
+ sec = self.find_section(b'.dynstr')
+ for i in self.dynamic:
+ if i.d_tag == entrynum:
+ res = sec.sh_offset + i.val
+ assert isinstance(res, int)
+ return res
+ return None
+
+ def print_rpath(self) -> None:
+ offset = self.get_entry_offset(DT_RPATH)
+ if offset is None:
+ print("This file does not have an rpath.")
+ else:
+ self.bf.seek(offset)
+ print(self.read_str())
+
+ def print_runpath(self) -> None:
+ offset = self.get_entry_offset(DT_RUNPATH)
+ if offset is None:
+ print("This file does not have a runpath.")
+ else:
+ self.bf.seek(offset)
+ print(self.read_str())
+
+ def print_deps(self) -> None:
+ sec = self.find_section(b'.dynstr')
+ deps = []
+ for i in self.dynamic:
+ if i.d_tag == DT_NEEDED:
+ deps.append(i)
+ for i in deps:
+ offset = sec.sh_offset + i.val
+ self.bf.seek(offset)
+ name = self.read_str()
+ print(name)
+
+ def fix_deps(self, prefix: bytes) -> None:
+ sec = self.find_section(b'.dynstr')
+ deps = []
+ for i in self.dynamic:
+ if i.d_tag == DT_NEEDED:
+ deps.append(i)
+ for i in deps:
+ offset = sec.sh_offset + i.val
+ self.bf.seek(offset)
+ name = self.read_str()
+ if name.startswith(prefix):
+ basename = name.split(b'/')[-1]
+ padding = b'\0' * (len(name) - len(basename))
+ newname = basename + padding
+ assert len(newname) == len(name)
+ self.bf.seek(offset)
+ self.bf.write(newname)
+
+ def fix_rpath(self, rpath_dirs_to_remove: T.Set[bytes], new_rpath: bytes) -> None:
+ # The path to search for can be either rpath or runpath.
+ # Fix both of them to be sure.
+ self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RPATH)
+ self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RUNPATH)
+
+ def fix_rpathtype_entry(self, rpath_dirs_to_remove: T.Set[bytes], new_rpath: bytes, entrynum: int) -> None:
+ rp_off = self.get_entry_offset(entrynum)
+ if rp_off is None:
+ if self.verbose:
+ print('File does not have an rpath; it is probably a fully static executable.')
+ return
+ self.bf.seek(rp_off)
+
+ old_rpath = self.read_str()
+ # Some rpath entries may come from multiple sources.
+ # Only add each one once.
+ new_rpaths = OrderedSet() # type: OrderedSet[bytes]
+ if new_rpath:
+ new_rpaths.update(new_rpath.split(b':'))
+ if old_rpath:
+ # Filter out build-only rpath entries
+ # added by get_link_dep_subdirs() or
+ # specified by user with build_rpath.
+ for rpath_dir in old_rpath.split(b':'):
+ if not (rpath_dir in rpath_dirs_to_remove or
+ rpath_dir == (b'X' * len(rpath_dir))):
+ if rpath_dir:
+ new_rpaths.add(rpath_dir)
+
+ # Prepend user-specified new entries while preserving the ones that came from pkgconfig etc.
+ new_rpath = b':'.join(new_rpaths)
+
+ if len(old_rpath) < len(new_rpath):
+ msg = "New rpath must not be longer than the old one.\n Old: {}\n New: {}".format(old_rpath.decode('utf-8'), new_rpath.decode('utf-8'))
+ sys.exit(msg)
+ # The linker does read-only string deduplication. If there is a
+ # string that shares a suffix with the rpath, the two might get
+ # deduplicated. Changing the rpath string could then break something
+ # completely unrelated. This has already happened once with X.org.
+ # Thus we want to keep this change as small as possible to minimize
+ # the chance of clobbering other strings. It might still happen,
+ # but our behavior is identical to what chrpath does, and chrpath
+ # has been in use for ages, so in practice this should be rare.
+ if not new_rpath:
+ self.remove_rpath_entry(entrynum)
+ else:
+ self.bf.seek(rp_off)
+ self.bf.write(new_rpath)
+ self.bf.write(b'\0')
+
+ def remove_rpath_entry(self, entrynum: int) -> None:
+ sec = self.find_section(b'.dynamic')
+ if sec is None:
+ return None
+ for (i, entry) in enumerate(self.dynamic):
+ if entry.d_tag == entrynum:
+ rpentry = self.dynamic[i]
+ rpentry.d_tag = 0
+ self.dynamic = self.dynamic[:i] + self.dynamic[i + 1:] + [rpentry]
+ break
+ # DT_MIPS_RLD_MAP_REL is relative to the offset of the tag. Adjust it accordingly.
+ for entry in self.dynamic[i:]:
+ if entry.d_tag == DT_MIPS_RLD_MAP_REL:
+ entry.val += 2 * (self.ptrsize // 8)
+ break
+ self.bf.seek(sec.sh_offset)
+ for entry in self.dynamic:
+ entry.write(self.bf)
+ return None
+
+def fix_elf(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: T.Optional[bytes], verbose: bool = True) -> None:
+ with Elf(fname, verbose) as e:
+ if new_rpath is None:
+ e.print_rpath()
+ e.print_runpath()
+ else:
+ e.fix_rpath(rpath_dirs_to_remove, new_rpath)
+
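+# Usage sketch (file name hypothetical): with new_rpath=None the rpath and
+# runpath are only printed; with a bytes value they are rewritten in place
+# (the new rpath must not be longer than the old one):
+#
+#     fix_elf('libfoo.so', set(), None)                        # inspect only
+#     fix_elf('libfoo.so', {b'/builddir'}, b'$ORIGIN/../lib')  # rewrite
+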
+def get_darwin_rpaths_to_remove(fname: str) -> T.List[str]:
+ out = subprocess.check_output(['otool', '-l', fname],
+ universal_newlines=True,
+ stderr=subprocess.DEVNULL)
+ result = []
+ current_cmd = 'FOOBAR'
+ for line in out.split('\n'):
+ line = line.strip()
+ if ' ' not in line:
+ continue
+ key, value = line.split(' ', 1)
+ if key == 'cmd':
+ current_cmd = value
+ if key == 'path' and current_cmd == 'LC_RPATH':
+ rp = value.split('(', 1)[0].strip()
+ result.append(rp)
+ return result
+
+def fix_darwin(fname: str, new_rpath: str, final_path: str, install_name_mappings: T.Dict[str, str]) -> None:
+ try:
+ rpaths = get_darwin_rpaths_to_remove(fname)
+ except subprocess.CalledProcessError:
+ # Otool failed, which happens when invoked on a
+ # non-executable target. Just return.
+ return
+ try:
+ args = []
+ if rpaths:
+ # TODO: fix this properly, not totally clear how
+ #
+ # removing rpaths from binaries on macOS has tons of
+ # weird edge cases. For instance, if the user provided
+ # a '-Wl,-rpath' argument in LDFLAGS that happens to
+ # coincide with an rpath generated from a dependency,
+ # this would cause installation failures, as meson would
+ # generate install_name_tool calls with two identical
+ # '-delete_rpath' arguments, which install_name_tool
+ # fails on. Because meson itself ensures that it never
+ # adds duplicate rpaths, duplicate rpaths necessarily
+ # come from user variables. The idea of using OrderedSet
+ # is to remove *at most one* duplicate RPATH entry. This
+ # is not optimal, as it only respects the user's choice
+ # partially: if they provided a non-duplicate '-Wl,-rpath'
+ # argument, it gets removed, if they provided a duplicate
+ # one, it remains in the final binary. A potentially optimal
+ # solution would split all user '-Wl,-rpath' arguments from
+ # LDFLAGS, and later add them back with '-add_rpath'.
+ for rp in OrderedSet(rpaths):
+ args += ['-delete_rpath', rp]
+ subprocess.check_call(['install_name_tool', fname] + args,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL)
+ args = []
+ if new_rpath:
+ args += ['-add_rpath', new_rpath]
+ # Rewrite -install_name @rpath/libfoo.dylib to /path/to/libfoo.dylib
+ if fname.endswith('dylib'):
+ args += ['-id', final_path]
+ if install_name_mappings:
+ for old, new in install_name_mappings.items():
+ args += ['-change', old, new]
+ if args:
+ subprocess.check_call(['install_name_tool', fname] + args,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL)
+ except Exception as err:
+ raise SystemExit(err)
+
+def fix_jar(fname: str) -> None:
+ subprocess.check_call(['jar', 'xfv', fname, 'META-INF/MANIFEST.MF'])
+ with open('META-INF/MANIFEST.MF', 'r+', encoding='utf-8') as f:
+ lines = f.readlines()
+ f.seek(0)
+ for line in lines:
+ if not line.startswith('Class-Path:'):
+ f.write(line)
+ f.truncate()
+ subprocess.check_call(['jar', 'ufm', fname, 'META-INF/MANIFEST.MF'])
+
+def fix_rpath(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: T.Union[str, bytes], final_path: str, install_name_mappings: T.Dict[str, str], verbose: bool = True) -> None:
+ global INSTALL_NAME_TOOL
+ # Static libraries, import libraries, debug information, headers, etc.
+ # never have rpaths.
+ # DLLs and EXEs currently do not need runtime path fixing.
+ if fname.endswith(('.a', '.lib', '.pdb', '.h', '.hpp', '.dll', '.exe')):
+ return
+ try:
+ if fname.endswith('.jar'):
+ fix_jar(fname)
+ return
+ if isinstance(new_rpath, str):
+ new_rpath = new_rpath.encode('utf8')
+ fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose)
+ return
+ except SystemExit as e:
+ if isinstance(e.code, int) and e.code == 0:
+ pass
+ else:
+ raise
+ # We don't look for this on import because it will do a useless PATH lookup
+ # on non-mac platforms. That can be expensive on some Windows machines
+ # (up to 30ms), which is significant with --only-changed. For details, see:
+ # https://github.com/mesonbuild/meson/pull/6612#discussion_r378581401
+ if INSTALL_NAME_TOOL is False:
+ INSTALL_NAME_TOOL = bool(shutil.which('install_name_tool'))
+ if INSTALL_NAME_TOOL:
+ if isinstance(new_rpath, bytes):
+ new_rpath = new_rpath.decode('utf8')
+ fix_darwin(fname, new_rpath, final_path, install_name_mappings)
diff --git a/meson/mesonbuild/scripts/depscan.py b/meson/mesonbuild/scripts/depscan.py
new file mode 100644
index 000000000..9fc435b5d
--- /dev/null
+++ b/meson/mesonbuild/scripts/depscan.py
@@ -0,0 +1,201 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pathlib
+import pickle
+import re
+import os
+import sys
+import typing as T
+
+from ..backend.ninjabackend import TargetDependencyScannerInfo, ninja_quote
+from ..compilers.compilers import lang_suffixes
+
+CPP_IMPORT_RE = re.compile(r'\w*import ([a-zA-Z0-9]+);')
+CPP_EXPORT_RE = re.compile(r'\w*export module ([a-zA-Z0-9]+);')
+
+FORTRAN_INCLUDE_PAT = r"^\s*include\s*['\"](\w+\.\w+)['\"]"
+FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$"
+FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)"
+FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)"
+
+FORTRAN_MODULE_RE = re.compile(FORTRAN_MODULE_PAT, re.IGNORECASE)
+FORTRAN_SUBMOD_RE = re.compile(FORTRAN_SUBMOD_PAT, re.IGNORECASE)
+FORTRAN_USE_RE = re.compile(FORTRAN_USE_PAT, re.IGNORECASE)
+
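+# Examples of source lines matched by the patterns above:
+#
+#     'use iso_c_binding'    -> FORTRAN_USE_RE,    group(1) == 'iso_c_binding'
+#     'module foo'           -> FORTRAN_MODULE_RE, group(1) == 'foo'
+#     'submodule (a1:a2) a3' -> FORTRAN_SUBMOD_RE, groups ('a1:a2', 'a3')
+#     'export module spam;'  -> CPP_EXPORT_RE,     group(1) == 'spam'
+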
+class DependencyScanner:
+ def __init__(self, pickle_file: str, outfile: str, sources: T.List[str]):
+ with open(pickle_file, 'rb') as pf:
+ self.target_data = pickle.load(pf) # type: TargetDependencyScannerInfo
+ self.outfile = outfile
+ self.sources = sources
+ self.provided_by = {} # type: T.Dict[str, str]
+ self.exports = {} # type: T.Dict[str, str]
+ self.needs = {} # type: T.Dict[str, T.List[str]]
+ self.sources_with_exports = [] # type: T.List[str]
+
+ def scan_file(self, fname: str) -> None:
+ suffix = os.path.splitext(fname)[1][1:].lower()
+ if suffix in lang_suffixes['fortran']:
+ self.scan_fortran_file(fname)
+ elif suffix in lang_suffixes['cpp']:
+ self.scan_cpp_file(fname)
+ else:
+ sys.exit(f'Cannot scan files with suffix .{suffix}.')
+
+ def scan_fortran_file(self, fname: str) -> None:
+ fpath = pathlib.Path(fname)
+ modules_in_this_file = set()
+ for line in fpath.read_text(encoding='utf-8').split('\n'):
+ import_match = FORTRAN_USE_RE.match(line)
+ export_match = FORTRAN_MODULE_RE.match(line)
+ submodule_export_match = FORTRAN_SUBMOD_RE.match(line)
+ if import_match:
+ needed = import_match.group(1).lower()
+ # In Fortran a file may contain a use statement for the very
+ # module it defines. Prevent such circular self-dependencies.
+ if needed not in modules_in_this_file:
+ if fname in self.needs:
+ self.needs[fname].append(needed)
+ else:
+ self.needs[fname] = [needed]
+ if export_match:
+ exported_module = export_match.group(1).lower()
+ assert exported_module not in modules_in_this_file
+ modules_in_this_file.add(exported_module)
+ if exported_module in self.provided_by:
+ raise RuntimeError(f'Multiple files provide module {exported_module}.')
+ self.sources_with_exports.append(fname)
+ self.provided_by[exported_module] = fname
+ self.exports[fname] = exported_module
+ if submodule_export_match:
+ # Store submodule "Foo" "Bar" as "foo:bar".
+ # A submodule declaration can be both an import and an export declaration:
+ #
+ # submodule (a1:a2) a3
+ # - requires a1@a2.smod
+ # - produces a1@a3.smod
+ parent_module_name_full = submodule_export_match.group(1).lower()
+ parent_module_name = parent_module_name_full.split(':')[0]
+ submodule_name = submodule_export_match.group(2).lower()
+ concat_name = f'{parent_module_name}:{submodule_name}'
+ self.sources_with_exports.append(fname)
+ self.provided_by[concat_name] = fname
+ self.exports[fname] = concat_name
+ # Fortran requires that the immediate parent module be built
+ # before the current one. Thus:
+ #
+ # submodule (parent) parent <- requires parent.mod (really parent.smod, but they are created at the same time)
+ # submodule (a1:a2) a3 <- requires a1@a2.smod
+ #
+ # a3 does not depend on the a1 parent module directly, only transitively.
+ if fname in self.needs:
+ self.needs[fname].append(parent_module_name_full)
+ else:
+ self.needs[fname] = [parent_module_name_full]
+
+ def scan_cpp_file(self, fname: str) -> None:
+ fpath = pathlib.Path(fname)
+ for line in fpath.read_text(encoding='utf-8').split('\n'):
+ import_match = CPP_IMPORT_RE.match(line)
+ export_match = CPP_EXPORT_RE.match(line)
+ if import_match:
+ needed = import_match.group(1)
+ if fname in self.needs:
+ self.needs[fname].append(needed)
+ else:
+ self.needs[fname] = [needed]
+ if export_match:
+ exported_module = export_match.group(1)
+ if exported_module in self.provided_by:
+ raise RuntimeError(f'Multiple files provide module {exported_module}.')
+ self.sources_with_exports.append(fname)
+ self.provided_by[exported_module] = fname
+ self.exports[fname] = exported_module
+
+ def objname_for(self, src: str) -> str:
+ objname = self.target_data.source2object[src]
+ assert isinstance(objname, str)
+ return objname
+
+ def module_name_for(self, src: str) -> str:
+ suffix = os.path.splitext(src)[1][1:].lower()
+ if suffix in lang_suffixes['fortran']:
+ exported = self.exports[src]
+ # Module foo:bar goes to a file name foo@bar.smod
+ # Module Foo goes to a file name foo.mod
+ namebase = exported.replace(':', '@')
+ if ':' in exported:
+ extension = 'smod'
+ else:
+ extension = 'mod'
+ return os.path.join(self.target_data.private_dir, f'{namebase}.{extension}')
+ elif suffix in lang_suffixes['cpp']:
+ return '{}.ifc'.format(self.exports[src])
+ else:
+ raise RuntimeError('Unreachable code.')
+
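+ # Example of the mapping above (assuming private_dir == 'priv'):
+ #
+ #     Fortran module 'foo'        -> priv/foo.mod
+ #     Fortran submodule 'foo:bar' -> priv/foo@bar.smod
+ #     C++ module 'spam'           -> spam.ifc
+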
+ def scan(self) -> int:
+ for s in self.sources:
+ self.scan_file(s)
+ with open(self.outfile, 'w', encoding='utf-8') as ofile:
+ ofile.write('ninja_dyndep_version = 1\n')
+ for src in self.sources:
+ objfilename = self.objname_for(src)
+ mods_and_submods_needed = []
+ module_files_generated = []
+ module_files_needed = []
+ if src in self.sources_with_exports:
+ module_files_generated.append(self.module_name_for(src))
+ if src in self.needs:
+ for modname in self.needs[src]:
+ if modname not in self.provided_by:
+ # Nothing provides this module; we assume that it
+ # comes from a dependency library somewhere and is
+ # already built by the time this compilation starts.
+ pass
+ else:
+ mods_and_submods_needed.append(modname)
+
+ for modname in mods_and_submods_needed:
+ provider_src = self.provided_by[modname]
+ provider_modfile = self.module_name_for(provider_src)
+ # Prune self-dependencies
+ if provider_src != src:
+ module_files_needed.append(provider_modfile)
+
+ quoted_objfilename = ninja_quote(objfilename, True)
+ quoted_module_files_generated = [ninja_quote(x, True) for x in module_files_generated]
+ quoted_module_files_needed = [ninja_quote(x, True) for x in module_files_needed]
+ if quoted_module_files_generated:
+ mod_gen = '| ' + ' '.join(quoted_module_files_generated)
+ else:
+ mod_gen = ''
+ if quoted_module_files_needed:
+ mod_dep = '| ' + ' '.join(quoted_module_files_needed)
+ else:
+ mod_dep = ''
+ build_line = 'build {} {}: dyndep {}'.format(quoted_objfilename,
+ mod_gen,
+ mod_dep)
+ ofile.write(build_line + '\n')
+ return 0
+
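+# A generated dyndep file looks like this (names illustrative):
+#
+#     ninja_dyndep_version = 1
+#     build foo.f90.o | priv/foo.mod: dyndep | priv/bar.mod
+#
+# i.e. compiling foo.f90 also produces foo.mod and additionally depends
+# on bar.mod, so Ninja schedules it after the compilation producing bar.mod.
+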
+def run(args: T.List[str]) -> int:
+ pickle_file = args[0]
+ outfile = args[1]
+ sources = args[2:]
+ scanner = DependencyScanner(pickle_file, outfile, sources)
+ return scanner.scan()
diff --git a/meson/mesonbuild/scripts/dirchanger.py b/meson/mesonbuild/scripts/dirchanger.py
new file mode 100644
index 000000000..21632cd89
--- /dev/null
+++ b/meson/mesonbuild/scripts/dirchanger.py
@@ -0,0 +1,29 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''CD into dir given as first argument and execute
+the command given in the rest of the arguments.'''
+
+import os, subprocess, sys
+import typing as T
+
+def run(args: T.List[str]) -> int:
+ dirname = args[0]
+ command = args[1:]
+
+ os.chdir(dirname)
+ return subprocess.call(command)
+
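+# Usage sketch (arguments illustrative): run 'ninja test' inside the
+# build directory:
+#
+#     python dirchanger.py builddir ninja test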
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/externalproject.py b/meson/mesonbuild/scripts/externalproject.py
new file mode 100644
index 000000000..a8e3bfe2f
--- /dev/null
+++ b/meson/mesonbuild/scripts/externalproject.py
@@ -0,0 +1,109 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import argparse
+import multiprocessing
+import subprocess
+from pathlib import Path
+import typing as T
+
+from ..mesonlib import Popen_safe
+
+class ExternalProject:
+ def __init__(self, options: argparse.Namespace):
+ self.name = options.name
+ self.src_dir = options.srcdir
+ self.build_dir = options.builddir
+ self.install_dir = options.installdir
+ self.log_dir = options.logdir
+ self.verbose = options.verbose
+ self.stampfile = options.stampfile
+ self.depfile = options.depfile
+ self.make = options.make
+
+ def write_depfile(self) -> None:
+ with open(self.depfile, 'w', encoding='utf-8') as f:
+ f.write(f'{self.stampfile}: \\\n')
+ for dirpath, dirnames, filenames in os.walk(self.src_dir):
+ dirnames[:] = [d for d in dirnames if not d.startswith('.')]
+ for fname in filenames:
+ if fname.startswith('.'):
+ continue
+ path = Path(dirpath, fname)
+ f.write(' {} \\\n'.format(path.as_posix().replace(' ', '\\ ')))
+
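+ # The depfile written above uses Makefile dependency syntax, e.g.
+ # (paths illustrative):
+ #
+ #     stamp: \
+ #      /src/proj/configure \
+ #      /src/proj/Makefile.in \
+ #
+ # Spaces in paths are escaped so the consumer can parse them back.
+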
+ def write_stampfile(self) -> None:
+ with open(self.stampfile, 'w', encoding='utf-8') as f:
+ pass
+
+ def gnu_make(self) -> bool:
+ p, o, e = Popen_safe([self.make, '--version'])
+ if p.returncode == 0 and 'GNU Make' in o:
+ return True
+ return False
+
+ def build(self) -> int:
+ make_cmd = [self.make]
+ if self.gnu_make():
+ make_cmd.append('-j' + str(multiprocessing.cpu_count()))
+
+ rc = self._run('build', make_cmd)
+ if rc != 0:
+ return rc
+
+ install_cmd = make_cmd + ['DESTDIR= ' + self.install_dir, 'install']
+ rc = self._run('install', install_cmd)
+ if rc != 0:
+ return rc
+
+ self.write_depfile()
+ self.write_stampfile()
+
+ return 0
+
+ def _run(self, step: str, command: T.List[str]) -> int:
+ m = 'Running command ' + str(command) + ' in directory ' + str(self.build_dir) + '\n'
+ log_filename = Path(self.log_dir, f'{self.name}-{step}.log')
+ output = None
+ if not self.verbose:
+ output = open(log_filename, 'w', encoding='utf-8')
+ output.write(m + '\n')
+ output.flush()
+ else:
+ print(m)
+ p, o, e = Popen_safe(command, stderr=subprocess.STDOUT, stdout=output,
+ cwd=self.build_dir)
+ if p.returncode != 0:
+ m = f'{step} step returned error code {p.returncode}.'
+ if not self.verbose:
+ m += '\nSee logs: ' + str(log_filename)
+ print(m)
+ return p.returncode
+
+def run(args: T.List[str]) -> int:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--name')
+ parser.add_argument('--srcdir')
+ parser.add_argument('--builddir')
+ parser.add_argument('--installdir')
+ parser.add_argument('--logdir')
+ parser.add_argument('--make')
+ parser.add_argument('--verbose', action='store_true')
+ parser.add_argument('stampfile')
+ parser.add_argument('depfile')
+
+ options = parser.parse_args(args)
+ ep = ExternalProject(options)
+ return ep.build()
diff --git a/meson/mesonbuild/scripts/gettext.py b/meson/mesonbuild/scripts/gettext.py
new file mode 100644
index 000000000..b1ce6af1c
--- /dev/null
+++ b/meson/mesonbuild/scripts/gettext.py
@@ -0,0 +1,125 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import shutil
+import argparse
+import subprocess
+from . import destdir_join
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('command')
+parser.add_argument('--pkgname', default='')
+parser.add_argument('--datadirs', default='')
+parser.add_argument('--langs', default='')
+parser.add_argument('--localedir', default='')
+parser.add_argument('--subdir', default='')
+parser.add_argument('--extra-args', default='')
+
+def read_linguas(src_sub: str) -> T.List[str]:
+ # Syntax of this file is documented here:
+ # https://www.gnu.org/software/gettext/manual/html_node/po_002fLINGUAS.html
+ linguas = os.path.join(src_sub, 'LINGUAS')
+ try:
+ langs = []
+ with open(linguas, encoding='utf-8') as f:
+ for line in f:
+ line = line.strip()
+ if line and not line.startswith('#'):
+ langs += line.split()
+ return langs
+ except (FileNotFoundError, PermissionError):
+ print(f'Could not read file LINGUAS in {src_sub}')
+ return []
+
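+# A LINGUAS file lists locale names separated by whitespace or newlines,
+# with '#' starting a comment, e.g.:
+#
+#     # available translations
+#     de fr
+#     pt_BR
+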
+def run_potgen(src_sub: str, pkgname: str, datadirs: str, args: T.List[str]) -> int:
+ listfile = os.path.join(src_sub, 'POTFILES.in')
+ if not os.path.exists(listfile):
+ listfile = os.path.join(src_sub, 'POTFILES')
+ if not os.path.exists(listfile):
+ print('Could not find file POTFILES in %s' % src_sub)
+ return 1
+
+ child_env = os.environ.copy()
+ if datadirs:
+ child_env['GETTEXTDATADIRS'] = datadirs
+
+ ofile = os.path.join(src_sub, pkgname + '.pot')
+ return subprocess.call(['xgettext', '--package-name=' + pkgname, '-p', src_sub, '-f', listfile,
+ '-D', os.environ['MESON_SOURCE_ROOT'], '-k_', '-o', ofile] + args,
+ env=child_env)
+
+def gen_gmo(src_sub: str, bld_sub: str, langs: T.List[str]) -> int:
+ for l in langs:
+ subprocess.check_call(['msgfmt', os.path.join(src_sub, l + '.po'),
+ '-o', os.path.join(bld_sub, l + '.gmo')])
+ return 0
+
+def update_po(src_sub: str, pkgname: str, langs: T.List[str]) -> int:
+ potfile = os.path.join(src_sub, pkgname + '.pot')
+ for l in langs:
+ pofile = os.path.join(src_sub, l + '.po')
+ if os.path.exists(pofile):
+ subprocess.check_call(['msgmerge', '-q', '-o', pofile, pofile, potfile])
+ else:
+ subprocess.check_call(['msginit', '--input', potfile, '--output-file', pofile, '--locale', l, '--no-translator'])
+ return 0
+
+def do_install(src_sub: str, bld_sub: str, dest: str, pkgname: str, langs: T.List[str]) -> int:
+ for l in langs:
+ srcfile = os.path.join(bld_sub, l + '.gmo')
+ outfile = os.path.join(dest, l, 'LC_MESSAGES',
+ pkgname + '.mo')
+ tempfile = outfile + '.tmp'
+ os.makedirs(os.path.dirname(outfile), exist_ok=True)
+ shutil.copy2(srcfile, tempfile)
+ os.replace(tempfile, outfile)
+ if not os.getenv('MESON_INSTALL_QUIET', False):
+ print(f'Installing {srcfile} to {outfile}')
+ return 0
+
+def run(args: T.List[str]) -> int:
+ options = parser.parse_args(args)
+ subcmd = options.command
+ langs = options.langs.split('@@') if options.langs else None
+ extra_args = options.extra_args.split('@@') if options.extra_args else []
+ subdir = os.environ.get('MESON_SUBDIR', '')
+ if options.subdir:
+ subdir = options.subdir
+ src_sub = os.path.join(os.environ['MESON_SOURCE_ROOT'], subdir)
+ bld_sub = os.path.join(os.environ['MESON_BUILD_ROOT'], subdir)
+
+ if not langs:
+ langs = read_linguas(src_sub)
+
+ if subcmd == 'pot':
+ return run_potgen(src_sub, options.pkgname, options.datadirs, extra_args)
+ elif subcmd == 'gen_gmo':
+ return gen_gmo(src_sub, bld_sub, langs)
+ elif subcmd == 'update_po':
+ if run_potgen(src_sub, options.pkgname, options.datadirs, extra_args) != 0:
+ return 1
+ return update_po(src_sub, options.pkgname, langs)
+ elif subcmd == 'install':
+ destdir = os.environ.get('DESTDIR', '')
+ dest = destdir_join(destdir, os.path.join(os.environ['MESON_INSTALL_PREFIX'],
+ options.localedir))
+ if gen_gmo(src_sub, bld_sub, langs) != 0:
+ return 1
+ do_install(src_sub, bld_sub, dest, options.pkgname, langs)
+ else:
+ print('Unknown subcommand.')
+ return 1
+ return 0
diff --git a/meson/mesonbuild/scripts/gtkdochelper.py b/meson/mesonbuild/scripts/gtkdochelper.py
new file mode 100644
index 000000000..153c3d933
--- /dev/null
+++ b/meson/mesonbuild/scripts/gtkdochelper.py
@@ -0,0 +1,295 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, os
+import subprocess
+import shutil
+import argparse
+from ..mesonlib import MesonException, Popen_safe, is_windows, is_cygwin, split_args
+from . import destdir_join
+import typing as T
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument('--sourcedir', dest='sourcedir')
+parser.add_argument('--builddir', dest='builddir')
+parser.add_argument('--subdir', dest='subdir')
+parser.add_argument('--headerdirs', dest='headerdirs')
+parser.add_argument('--mainfile', dest='mainfile')
+parser.add_argument('--modulename', dest='modulename')
+parser.add_argument('--moduleversion', dest='moduleversion')
+parser.add_argument('--htmlargs', dest='htmlargs', default='')
+parser.add_argument('--scanargs', dest='scanargs', default='')
+parser.add_argument('--scanobjsargs', dest='scanobjsargs', default='')
+parser.add_argument('--gobjects-types-file', dest='gobject_typesfile', default='')
+parser.add_argument('--fixxrefargs', dest='fixxrefargs', default='')
+parser.add_argument('--mkdbargs', dest='mkdbargs', default='')
+parser.add_argument('--ld', dest='ld', default='')
+parser.add_argument('--cc', dest='cc', default='')
+parser.add_argument('--ldflags', dest='ldflags', default='')
+parser.add_argument('--cflags', dest='cflags', default='')
+parser.add_argument('--content-files', dest='content_files', default='')
+parser.add_argument('--expand-content-files', dest='expand_content_files', default='')
+parser.add_argument('--html-assets', dest='html_assets', default='')
+parser.add_argument('--ignore-headers', dest='ignore_headers', default='')
+parser.add_argument('--namespace', dest='namespace', default='')
+parser.add_argument('--mode', dest='mode', default='')
+parser.add_argument('--installdir', dest='install_dir')
+parser.add_argument('--run', dest='run', default='')
+for tool in ['scan', 'scangobj', 'mkdb', 'mkhtml', 'fixxref']:
+ program_name = 'gtkdoc-' + tool
+ parser.add_argument('--' + program_name, dest=program_name.replace('-', '_'))
+
+def gtkdoc_run_check(cmd: T.List[str], cwd: str, library_paths: T.Optional[T.List[str]] = None) -> None:
+ if library_paths is None:
+ library_paths = []
+
+ env = dict(os.environ)
+ if is_windows() or is_cygwin():
+ if 'PATH' in env:
+ library_paths.extend(env['PATH'].split(os.pathsep))
+ env['PATH'] = os.pathsep.join(library_paths)
+ else:
+ if 'LD_LIBRARY_PATH' in env:
+ library_paths.extend(env['LD_LIBRARY_PATH'].split(os.pathsep))
+ env['LD_LIBRARY_PATH'] = os.pathsep.join(library_paths)
+
+ if is_windows():
+ cmd.insert(0, sys.executable)
+
+ # Put stderr into stdout since we want to print it out anyway.
+ # This preserves the order of messages.
+ p, out = Popen_safe(cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT)[0:2]
+ if p.returncode != 0:
+ err_msg = [f"{cmd!r} failed with status {p.returncode:d}"]
+ if out:
+ err_msg.append(out)
+ raise MesonException('\n'.join(err_msg))
+ elif out:
+ # Unfortunately Windows cmd.exe consoles may be using a codepage
+ # that makes print() choke with a UnicodeEncodeError, so let's
+ # ignore such errors for now, as a compromise, since we are
+ # echoing console output here anyway...
+ try:
+ print(out)
+ except UnicodeEncodeError:
+ pass
+
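+# Call sketch (arguments hypothetical): run gtkdoc-scan in the output
+# directory with an extra library search path merged into PATH or
+# LD_LIBRARY_PATH as appropriate for the platform:
+#
+#     gtkdoc_run_check(['gtkdoc-scan', '--module=foo'], '/tmp/docs',
+#                      library_paths=['/tmp/build/lib'])
+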
+def build_gtkdoc(source_root: str, build_root: str, doc_subdir: str, src_subdirs: T.List[str],
+ main_file: str, module: str, module_version: str,
+ html_args: T.List[str], scan_args: T.List[str], fixxref_args: T.List[str], mkdb_args: T.List[str],
+ gobject_typesfile: str, scanobjs_args: T.List[str], run: str, ld: str, cc: str, ldflags: str, cflags: str,
+ html_assets: T.List[str], content_files: T.List[str], ignore_headers: T.List[str], namespace: str,
+ expand_content_files: T.List[str], mode: str, options: argparse.Namespace) -> None:
+ print("Building documentation for %s" % module)
+
+ src_dir_args = []
+ for src_dir in src_subdirs:
+ if not os.path.isabs(src_dir):
+ dirs = [os.path.join(source_root, src_dir),
+ os.path.join(build_root, src_dir)]
+ else:
+ dirs = [src_dir]
+ src_dir_args += ['--source-dir=' + d for d in dirs]
+
+ doc_src = os.path.join(source_root, doc_subdir)
+ abs_out = os.path.join(build_root, doc_subdir)
+ htmldir = os.path.join(abs_out, 'html')
+
+ content_files += [main_file]
+ sections = os.path.join(doc_src, module + "-sections.txt")
+ if os.path.exists(sections):
+ content_files.append(sections)
+
+ overrides = os.path.join(doc_src, module + "-overrides.txt")
+ if os.path.exists(overrides):
+ content_files.append(overrides)
+
+ # Copy files to build directory
+ for f in content_files:
+ # FIXME: Use mesonlib.File objects so we don't need to do this
+ if not os.path.isabs(f):
+ f = os.path.join(doc_src, f)
+ elif os.path.commonpath([f, build_root]) == build_root:
+ continue
+ shutil.copyfile(f, os.path.join(abs_out, os.path.basename(f)))
+
+ shutil.rmtree(htmldir, ignore_errors=True)
+ try:
+ os.mkdir(htmldir)
+ except Exception:
+ pass
+
+ for f in html_assets:
+ f_abs = os.path.join(doc_src, f)
+ shutil.copyfile(f_abs, os.path.join(htmldir, os.path.basename(f_abs)))
+
+ scan_cmd = [options.gtkdoc_scan, '--module=' + module] + src_dir_args
+ if ignore_headers:
+ scan_cmd.append('--ignore-headers=' + ' '.join(ignore_headers))
+ # Add user-specified arguments
+ scan_cmd += scan_args
+ gtkdoc_run_check(scan_cmd, abs_out)
+
+ # Use the generated types file when available, otherwise gobject_typesfile
+ # would often be a path to source dir instead of build dir.
+ if '--rebuild-types' in scan_args:
+ gobject_typesfile = os.path.join(abs_out, module + '.types')
+
+ if gobject_typesfile:
+ scanobjs_cmd = [options.gtkdoc_scangobj] + scanobjs_args
+ scanobjs_cmd += ['--types=' + gobject_typesfile,
+ '--module=' + module,
+ '--run=' + run,
+ '--cflags=' + cflags,
+ '--ldflags=' + ldflags,
+ '--cc=' + cc,
+ '--ld=' + ld,
+ '--output-dir=' + abs_out]
+
+ library_paths = []
+ for ldflag in split_args(ldflags):
+ if ldflag.startswith('-Wl,-rpath,'):
+ library_paths.append(ldflag[11:])
+
+ gtkdoc_run_check(scanobjs_cmd, build_root, library_paths)
+
+ # Make docbook files
+ if mode == 'auto':
+ # Guessing is probably a poor idea, but this keeps compatibility
+ # with previous behavior.
+ if main_file.endswith('sgml'):
+ modeflag = '--sgml-mode'
+ else:
+ modeflag = '--xml-mode'
+ elif mode == 'xml':
+ modeflag = '--xml-mode'
+ elif mode == 'sgml':
+ modeflag = '--sgml-mode'
+ else: # none
+ modeflag = None
+
+ mkdb_cmd = [options.gtkdoc_mkdb,
+ '--module=' + module,
+ '--output-format=xml',
+ '--expand-content-files=' + ' '.join(expand_content_files),
+ ] + src_dir_args
+ if namespace:
+ mkdb_cmd.append('--name-space=' + namespace)
+ if modeflag:
+ mkdb_cmd.append(modeflag)
+ if main_file:
+ # Yes, this is the flag even if the file is in xml.
+ mkdb_cmd.append('--main-sgml-file=' + main_file)
+ # Add user-specified arguments
+ mkdb_cmd += mkdb_args
+ gtkdoc_run_check(mkdb_cmd, abs_out)
+
+ # Make HTML documentation
+ mkhtml_cmd = [options.gtkdoc_mkhtml,
+ '--path=' + ':'.join((doc_src, abs_out)),
+ module,
+ ] + html_args
+ if main_file:
+ mkhtml_cmd.append('../' + main_file)
+ else:
+ mkhtml_cmd.append('%s-docs.xml' % module)
+ # HTML generation must be run in the HTML dir.
+ gtkdoc_run_check(mkhtml_cmd, htmldir)
+
+ # Fix cross-references in HTML files
+ fixref_cmd = [options.gtkdoc_fixxref,
+ '--module=' + module,
+ '--module-dir=html'] + fixxref_args
+ gtkdoc_run_check(fixref_cmd, abs_out)
+
+ if module_version:
+ shutil.move(os.path.join(htmldir, f'{module}.devhelp2'),
+ os.path.join(htmldir, f'{module}-{module_version}.devhelp2'))
+
+def install_gtkdoc(build_root: str, doc_subdir: str, install_prefix: str, datadir: str, module: str) -> None:
+ source = os.path.join(build_root, doc_subdir, 'html')
+ final_destination = os.path.join(install_prefix, datadir, module)
+ shutil.rmtree(final_destination, ignore_errors=True)
+ shutil.copytree(source, final_destination)
+
+def run(args: T.List[str]) -> int:
+ options = parser.parse_args(args)
+ if options.htmlargs:
+ htmlargs = options.htmlargs.split('@@')
+ else:
+ htmlargs = []
+ if options.scanargs:
+ scanargs = options.scanargs.split('@@')
+ else:
+ scanargs = []
+ if options.scanobjsargs:
+ scanobjsargs = options.scanobjsargs.split('@@')
+ else:
+ scanobjsargs = []
+ if options.fixxrefargs:
+ fixxrefargs = options.fixxrefargs.split('@@')
+ else:
+ fixxrefargs = []
+ if options.mkdbargs:
+ mkdbargs = options.mkdbargs.split('@@')
+ else:
+ mkdbargs = []
+ build_gtkdoc(
+ options.sourcedir,
+ options.builddir,
+ options.subdir,
+ options.headerdirs.split('@@'),
+ options.mainfile,
+ options.modulename,
+ options.moduleversion,
+ htmlargs,
+ scanargs,
+ fixxrefargs,
+ mkdbargs,
+ options.gobject_typesfile,
+ scanobjsargs,
+ options.run,
+ options.ld,
+ options.cc,
+ options.ldflags,
+ options.cflags,
+ options.html_assets.split('@@') if options.html_assets else [],
+ options.content_files.split('@@') if options.content_files else [],
+ options.ignore_headers.split('@@') if options.ignore_headers else [],
+ options.namespace,
+ options.expand_content_files.split('@@') if options.expand_content_files else [],
+ options.mode,
+ options)
+
+ if 'MESON_INSTALL_PREFIX' in os.environ:
+ destdir = os.environ.get('DESTDIR', '')
+ install_prefix = destdir_join(destdir, os.environ['MESON_INSTALL_PREFIX'])
+ if options.install_dir:
+ install_dir = options.install_dir
+ else:
+ install_dir = options.modulename
+ if options.moduleversion:
+ install_dir += '-' + options.moduleversion
+ if os.path.isabs(install_dir):
+ install_dir = destdir_join(destdir, install_dir)
+ install_gtkdoc(options.builddir,
+ options.subdir,
+ install_prefix,
+ 'share/gtk-doc/html',
+ install_dir)
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/hotdochelper.py b/meson/mesonbuild/scripts/hotdochelper.py
new file mode 100644
index 000000000..a96a34afa
--- /dev/null
+++ b/meson/mesonbuild/scripts/hotdochelper.py
@@ -0,0 +1,38 @@
+import os
+import shutil
+import subprocess
+
+from . import destdir_join
+
+import argparse
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--install')
+parser.add_argument('--extra-extension-path', action="append", default=[])
+parser.add_argument('--name')
+parser.add_argument('--builddir')
+parser.add_argument('--project-version')
+
+
+def run(argv: T.List[str]) -> int:
+ options, args = parser.parse_known_args(argv)
+ subenv = os.environ.copy()
+
+ for ext_path in options.extra_extension_path:
+ subenv['PYTHONPATH'] = subenv.get('PYTHONPATH', '') + ':' + ext_path
+
+ res = subprocess.call(args, cwd=options.builddir, env=subenv)
+ if res != 0:
+ return res
+
+ if options.install:
+ source_dir = os.path.join(options.builddir, options.install)
+ destdir = os.environ.get('DESTDIR', '')
+ installdir = destdir_join(destdir,
+ os.path.join(os.environ['MESON_INSTALL_PREFIX'],
+ 'share/doc/', options.name, "html"))
+
+ shutil.rmtree(installdir, ignore_errors=True)
+ shutil.copytree(source_dir, installdir)
+ return 0
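+
+# Call sketch (arguments hypothetical): options the parser does not
+# recognize form the hotdoc command that is run in the build directory:
+#
+#     run(['--install', 'html', '--name', 'foo', '--builddir', 'bld',
+#          'hotdoc', 'run', '--conf-file', 'foo.json'])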
diff --git a/meson/mesonbuild/scripts/meson_exe.py b/meson/mesonbuild/scripts/meson_exe.py
new file mode 100644
index 000000000..cd3534cb0
--- /dev/null
+++ b/meson/mesonbuild/scripts/meson_exe.py
@@ -0,0 +1,125 @@
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import argparse
+import pickle
+import subprocess
+import typing as T
+import locale
+
+from .. import mesonlib
+from ..backend.backends import ExecutableSerialisation
+
+options = None
+
+def buildparser() -> argparse.ArgumentParser:
+ parser = argparse.ArgumentParser(description='Custom executable wrapper for Meson. Do not run on your own, mmm\'kay?')
+ parser.add_argument('--unpickle')
+ parser.add_argument('--capture')
+ parser.add_argument('--feed')
+ return parser
+
+def run_exe(exe: ExecutableSerialisation, extra_env: T.Optional[dict] = None) -> int:
+ if exe.exe_runner:
+ if not exe.exe_runner.found():
+ raise AssertionError('BUG: Can\'t run cross-compiled exe {!r} with not-found '
+ 'wrapper {!r}'.format(exe.cmd_args[0], exe.exe_runner.get_path()))
+ cmd_args = exe.exe_runner.get_command() + exe.cmd_args
+ else:
+ cmd_args = exe.cmd_args
+ child_env = os.environ.copy()
+ if extra_env:
+ child_env.update(extra_env)
+ if exe.env:
+ child_env = exe.env.get_env(child_env)
+ if exe.extra_paths:
+ child_env['PATH'] = (os.pathsep.join(exe.extra_paths + ['']) +
+ child_env['PATH'])
+ if exe.exe_runner and mesonlib.substring_is_in_list('wine', exe.exe_runner.get_command()):
+ child_env['WINEPATH'] = mesonlib.get_wine_shortpath(
+ exe.exe_runner.get_command(),
+ ['Z:' + p for p in exe.extra_paths] + child_env.get('WINEPATH', '').split(';')
+ )
+
+ stdin = None
+ if exe.feed:
+ stdin = open(exe.feed, 'rb')
+
+ pipe = subprocess.PIPE
+ if exe.verbose:
+ assert not exe.capture, 'Cannot capture and print to console at the same time'
+ pipe = None
+
+ p = subprocess.Popen(cmd_args, env=child_env, cwd=exe.workdir,
+ close_fds=False, stdin=stdin, stdout=pipe, stderr=pipe)
+ stdout, stderr = p.communicate()
+
+ if stdin is not None:
+ stdin.close()
+
+ if p.returncode == 0xc0000135:
+ # STATUS_DLL_NOT_FOUND on Windows indicating a common problem that is otherwise hard to diagnose
+ raise FileNotFoundError('due to missing DLLs')
+
+ if p.returncode != 0:
+ if exe.pickled:
+ print(f'while executing {cmd_args!r}')
+ if exe.verbose:
+ return p.returncode
+ encoding = locale.getpreferredencoding()
+ if not exe.capture:
+ print('--- stdout ---')
+ print(stdout.decode(encoding=encoding, errors='replace'))
+ print('--- stderr ---')
+ print(stderr.decode(encoding=encoding, errors='replace'))
+ return p.returncode
+
+ if exe.capture:
+ skip_write = False
+ try:
+ with open(exe.capture, 'rb') as cur:
+ skip_write = cur.read() == stdout
+ except OSError:
+ pass
+ if not skip_write:
+ with open(exe.capture, 'wb') as output:
+ output.write(stdout)
+
+ return 0
+
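+# Call sketch mirroring run() below (file name illustrative): a plain
+# command can be wrapped without a pickled file:
+#
+#     exe = ExecutableSerialisation(['echo', 'hello'], capture='out.txt')
+#     run_exe(exe)  # captures stdout into out.txt, skipping identical rewrites
+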
+def run(args: T.List[str]) -> int:
+ global options
+ parser = buildparser()
+ options, cmd_args = parser.parse_known_args(args)
+ # argparse supports a double dash to separate options from positional
+ # arguments, but parse_known_args leaves it in the remainder, so strip it manually.
+ if cmd_args and cmd_args[0] == '--':
+ cmd_args = cmd_args[1:]
+ if not options.unpickle and not cmd_args:
+ parser.error('either --unpickle or executable and arguments are required')
+ if options.unpickle:
+ if cmd_args or options.capture or options.feed:
+ parser.error('no other arguments can be used with --unpickle')
+ with open(options.unpickle, 'rb') as f:
+ exe = pickle.load(f)
+ exe.pickled = True
+ else:
+ exe = ExecutableSerialisation(cmd_args, capture=options.capture, feed=options.feed)
+
+ return run_exe(exe)
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/msgfmthelper.py b/meson/mesonbuild/scripts/msgfmthelper.py
new file mode 100644
index 000000000..3ddc9e6a9
--- /dev/null
+++ b/meson/mesonbuild/scripts/msgfmthelper.py
@@ -0,0 +1,37 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import subprocess
+import os
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('input')
+parser.add_argument('output')
+parser.add_argument('type')
+parser.add_argument('podir')
+parser.add_argument('--datadirs', default='')
+parser.add_argument('args', default=[], metavar='extra msgfmt argument', nargs='*')
+
+
+def run(args: T.List[str]) -> int:
+ options = parser.parse_args(args)
+ env = None
+ if options.datadirs:
+ env = os.environ.copy()
+ env.update({'GETTEXTDATADIRS': options.datadirs})
+ return subprocess.call(['msgfmt', '--' + options.type, '-d', options.podir,
+ '--template', options.input, '-o', options.output] + options.args,
+ env=env)
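+
+# Usage sketch (file names illustrative):
+#
+#     python msgfmthelper.py foo.desktop.in foo.desktop desktop po
+#
+# which runs: msgfmt --desktop -d po --template foo.desktop.in -o foo.desktop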
diff --git a/meson/mesonbuild/scripts/regen_checker.py b/meson/mesonbuild/scripts/regen_checker.py
new file mode 100644
index 000000000..c96bdc1e5
--- /dev/null
+++ b/meson/mesonbuild/scripts/regen_checker.py
@@ -0,0 +1,64 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, os
+import pickle, subprocess
+import typing as T
+from ..coredata import CoreData
+from ..backend.backends import RegenInfo
+from ..mesonlib import OptionKey
+
+# This could also be used for XCode.
+
+def need_regen(regeninfo: RegenInfo, regen_timestamp: float) -> bool:
+ for i in regeninfo.depfiles:
+ curfile = os.path.join(regeninfo.build_dir, i)
+ curtime = os.stat(curfile).st_mtime
+ if curtime > regen_timestamp:
+ return True
+ # The timestamp file gets automatically deleted by MSBuild during a 'Clean' build.
+ # We must make sure to recreate it, even if we do not regenerate the solution.
+ # Otherwise, Visual Studio will always consider the REGEN project out of date.
+ print("Everything is up-to-date, regeneration of build files is not needed.")
+ from ..backend.vs2010backend import Vs2010Backend
+ Vs2010Backend.touch_regen_timestamp(regeninfo.build_dir)
+ return False
+
+def regen(regeninfo: RegenInfo, meson_command: T.List[str], backend: str) -> None:
+ cmd = meson_command + ['--internal',
+ 'regenerate',
+ regeninfo.build_dir,
+ regeninfo.source_dir,
+ '--backend=' + backend]
+ subprocess.check_call(cmd)
+
+def run(args: T.List[str]) -> int:
+ private_dir = args[0]
+ dumpfile = os.path.join(private_dir, 'regeninfo.dump')
+ coredata_file = os.path.join(private_dir, 'coredata.dat')
+ with open(dumpfile, 'rb') as f:
+ regeninfo = pickle.load(f)
+ assert isinstance(regeninfo, RegenInfo)
+ with open(coredata_file, 'rb') as f:
+ coredata = pickle.load(f)
+ assert isinstance(coredata, CoreData)
+ backend = coredata.get_option(OptionKey('backend'))
+ assert isinstance(backend, str)
+ regen_timestamp = os.stat(dumpfile).st_mtime
+ if need_regen(regeninfo, regen_timestamp):
+ regen(regeninfo, coredata.meson_command, backend)
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/scanbuild.py b/meson/mesonbuild/scripts/scanbuild.py
new file mode 100644
index 000000000..bb8e30ce6
--- /dev/null
+++ b/meson/mesonbuild/scripts/scanbuild.py
@@ -0,0 +1,65 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import shutil
+import tempfile
+from ..environment import detect_ninja, detect_scanbuild
+from ..coredata import get_cmd_line_file, CmdLineFileParser
+from ..mesonlib import windows_proof_rmtree
+from pathlib import Path
+import typing as T
+from ast import literal_eval
+import os
+
+def scanbuild(exelist: T.List[str], srcdir: Path, blddir: Path, privdir: Path, logdir: Path, args: T.List[str]) -> int:
+ # In case of problems leave the temp directory around
+ # so it can be debugged.
+ scandir = tempfile.mkdtemp(dir=str(privdir))
+ meson_cmd = exelist + args
+ build_cmd = exelist + ['-o', str(logdir)] + detect_ninja() + ['-C', scandir]
+ rc = subprocess.call(meson_cmd + [str(srcdir), scandir])
+ if rc != 0:
+ return rc
+ rc = subprocess.call(build_cmd)
+ if rc == 0:
+ windows_proof_rmtree(scandir)
+ return rc
+
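+# run() is invoked as: scanbuild.py <sourcedir> <builddir> <meson args...>.
+# It configures a scratch build dir under meson-private with scan-build as the
+# compiler wrapper, then lets scan-build drive ninja and write its HTML report
+# into meson-logs/scanbuild.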
+def run(args: T.List[str]) -> int:
+ srcdir = Path(args[0])
+ bldpath = Path(args[1])
+ blddir = args[1]
+ meson_cmd = args[2:]
+ privdir = bldpath / 'meson-private'
+ logdir = bldpath / 'meson-logs' / 'scanbuild'
+ shutil.rmtree(str(logdir), ignore_errors=True)
+
+ # if any cross or native files are specified we should use them
+ cmd = get_cmd_line_file(blddir)
+ data = CmdLineFileParser()
+ data.read(cmd)
+
+ if 'cross_file' in data['properties']:
+ meson_cmd.extend([f'--cross-file={os.path.abspath(f)}' for f in literal_eval(data['properties']['cross_file'])])
+
+ if 'native_file' in data['properties']:
+ meson_cmd.extend([f'--native-file={os.path.abspath(f)}' for f in literal_eval(data['properties']['native_file'])])
+
+ exelist = detect_scanbuild()
+ if not exelist:
+        print('Could not find scan-build.')

+ return 1
+
+ return scanbuild(exelist, srcdir, bldpath, privdir, logdir, meson_cmd)
diff --git a/meson/mesonbuild/scripts/symbolextractor.py b/meson/mesonbuild/scripts/symbolextractor.py
new file mode 100644
index 000000000..17501e28b
--- /dev/null
+++ b/meson/mesonbuild/scripts/symbolextractor.py
@@ -0,0 +1,331 @@
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script extracts the symbols of a given shared library
+# into a file. If the symbols have not changed, the file is not
+# touched. This information is used to skip link steps if the
+# ABI has not changed.
+
+# This file is basically a reimplementation of
+# http://cgit.freedesktop.org/libreoffice/core/commit/?id=3213cd54b76bc80a6f0516aac75a48ff3b2ad67c
+
+import typing as T
+import os, sys
+from .. import mesonlib
+from .. import mlog
+from ..mesonlib import Popen_safe
+import argparse
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument('--cross-host', default=None, dest='cross_host',
+ help='cross compilation host platform')
+parser.add_argument('args', nargs='+')
+
+TOOL_WARNING_FILE = None
+RELINKING_WARNING = 'Relinking will always happen on source changes.'
+
+def dummy_syms(outfilename: str) -> None:
+ """Just touch it so relinking happens always."""
+ with open(outfilename, 'w', encoding='utf-8'):
+ pass
+
+def write_if_changed(text: str, outfilename: str) -> None:
+ try:
+ with open(outfilename, encoding='utf-8') as f:
+ oldtext = f.read()
+ if text == oldtext:
+ return
+ except FileNotFoundError:
+ pass
+ with open(outfilename, 'w', encoding='utf-8') as f:
+ f.write(text)
+
+def print_tool_warning(tools: T.List[str], msg: str, stderr: T.Optional[str] = None) -> None:
+ global TOOL_WARNING_FILE
+ if os.path.exists(TOOL_WARNING_FILE):
+ return
+ m = f'{tools!r} {msg}. {RELINKING_WARNING}'
+ if stderr:
+ m += '\n' + stderr
+ mlog.warning(m)
+ # Write it out so we don't warn again
+ with open(TOOL_WARNING_FILE, 'w', encoding='utf-8'):
+ pass
+
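+# Tools can be overridden from the environment, e.g. NM='llvm-nm' or
+# READELF='readelf --wide'; the value is split with shlex so flags are allowed.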
+def get_tool(name: str) -> T.List[str]:
+ evar = name.upper()
+ if evar in os.environ:
+ import shlex
+ return shlex.split(os.environ[evar])
+ return [name]
+
+def call_tool(name: str, args: T.List[str], **kwargs: T.Any) -> T.Optional[str]:
+ tool = get_tool(name)
+ try:
+ p, output, e = Popen_safe(tool + args, **kwargs)
+ except FileNotFoundError:
+ print_tool_warning(tool, 'not found')
+ return None
+ except PermissionError:
+ print_tool_warning(tool, 'not usable')
+ return None
+ if p.returncode != 0:
+ print_tool_warning(tool, 'does not work', e)
+ return None
+ return output
+
+def call_tool_nowarn(tool: T.List[str], **kwargs: T.Any) -> T.Tuple[T.Optional[str], T.Optional[str]]:
+ try:
+ p, output, e = Popen_safe(tool, **kwargs)
+ except FileNotFoundError:
+ return None, '{!r} not found\n'.format(tool[0])
+ except PermissionError:
+ return None, '{!r} not usable\n'.format(tool[0])
+ if p.returncode != 0:
+ return None, e
+ return output, None
+
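+# POSIX-format nm output is one symbol per line, e.g.
+#   my_func T 0000000000001139 000000000000001b
+# (name, type, value, size); the extractors below keep only the fields that
+# affect the ABI.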
+def gnu_syms(libfilename: str, outfilename: str) -> None:
+ # Get the name of the library
+ output = call_tool('readelf', ['-d', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ result = [x for x in output.split('\n') if 'SONAME' in x]
+ assert(len(result) <= 1)
+ # Get a list of all symbols exported
+ output = call_tool('nm', ['--dynamic', '--extern-only', '--defined-only',
+ '--format=posix', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ for line in output.split('\n'):
+ if not line:
+ continue
+ line_split = line.split()
+ entry = line_split[0:2]
+ # Store the size of symbols pointing to data objects so we relink
+ # when those change, which is needed because of copy relocations
+ # https://github.com/mesonbuild/meson/pull/7132#issuecomment-628353702
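+        # e.g. 'glob_var D 0000000000004010 0000000000000004' is stored as
+        # 'glob_var D 0000000000000004' (name, type, size).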
+ if line_split[1].upper() in ('B', 'G', 'D') and len(line_split) >= 4:
+ entry += [line_split[3]]
+ result += [' '.join(entry)]
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def solaris_syms(libfilename: str, outfilename: str) -> None:
+ # gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump
+ origpath = os.environ['PATH']
+ try:
+ os.environ['PATH'] = '/usr/gnu/bin:' + origpath
+ gnu_syms(libfilename, outfilename)
+ finally:
+ os.environ['PATH'] = origpath
+
+def osx_syms(libfilename: str, outfilename: str) -> None:
+ # Get the name of the library
+ output = call_tool('otool', ['-l', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ arr = output.split('\n')
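+    # Find the LC_ID_DYLIB load command; otool prints the install name two
+    # lines below it and the compatibility version five lines below.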
+ for (i, val) in enumerate(arr):
+ if 'LC_ID_DYLIB' in val:
+ match = i
+ break
+ result = [arr[match + 2], arr[match + 5]] # Libreoffice stores all 5 lines but the others seem irrelevant.
+ # Get a list of all symbols exported
+ output = call_tool('nm', ['--extern-only', '--defined-only',
+ '--format=posix', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ result += [' '.join(x.split()[0:2]) for x in output.split('\n')]
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def openbsd_syms(libfilename: str, outfilename: str) -> None:
+ # Get the name of the library
+ output = call_tool('readelf', ['-d', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ result = [x for x in output.split('\n') if 'SONAME' in x]
+ assert(len(result) <= 1)
+ # Get a list of all symbols exported
+ output = call_tool('nm', ['-D', '-P', '-g', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ # U = undefined (cope with the lack of --defined-only option)
+ result += [' '.join(x.split()[0:2]) for x in output.split('\n') if x and not x.endswith('U ')]
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def freebsd_syms(libfilename: str, outfilename: str) -> None:
+ # Get the name of the library
+ output = call_tool('readelf', ['-d', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ result = [x for x in output.split('\n') if 'SONAME' in x]
+ assert(len(result) <= 1)
+ # Get a list of all symbols exported
+ output = call_tool('nm', ['--dynamic', '--extern-only', '--defined-only',
+ '--format=posix', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+
+ result += [' '.join(x.split()[0:2]) for x in output.split('\n')]
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def cygwin_syms(impfilename: str, outfilename: str) -> None:
+ # Get the name of the library
+ output = call_tool('dlltool', ['-I', impfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ result = [output]
+ # Get the list of all symbols exported
+ output = call_tool('nm', ['--extern-only', '--defined-only',
+ '--format=posix', impfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ for line in output.split('\n'):
+ if ' T ' not in line:
+ continue
+ result.append(line.split(maxsplit=1)[0])
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def _get_implib_dllname(impfilename: str) -> T.Tuple[T.List[str], str]:
+ all_stderr = ''
+    # First try lib.exe, which is provided by MSVC, then llvm-lib.exe, which
+    # is provided by LLVM for use with clang-cl.
+ #
+ # We cannot call get_tool on `lib` because it will look at the `LIB` env
+ # var which is the list of library paths MSVC will search for import
+ # libraries while linking.
+ for lib in (['lib'], get_tool('llvm-lib')):
+ output, e = call_tool_nowarn(lib + ['-list', impfilename])
+ if output:
+ # The output is a list of DLLs that each symbol exported by the import
+ # library is available in. We only build import libraries that point to
+ # a single DLL, so we can pick any of these. Pick the last one for
+ # simplicity. Also skip the last line, which is empty.
+ return output.split('\n')[-2:-1], None
+ all_stderr += e
+ # Next, try dlltool.exe which is provided by MinGW
+ output, e = call_tool_nowarn(get_tool('dlltool') + ['-I', impfilename])
+ if output:
+ return [output], None
+ all_stderr += e
+ return ([], all_stderr)
+
+def _get_implib_exports(impfilename: str) -> T.Tuple[T.List[str], str]:
+ all_stderr = ''
+ # Force dumpbin.exe to use en-US so we can parse its output
+ env = os.environ.copy()
+ env['VSLANG'] = '1033'
+ output, e = call_tool_nowarn(get_tool('dumpbin') + ['-exports', impfilename], env=env)
+ if output:
+ lines = output.split('\n')
+ start = lines.index('File Type: LIBRARY')
+ end = lines.index(' Summary')
+ return lines[start:end], None
+ all_stderr += e
+ # Next, try llvm-nm.exe provided by LLVM, then nm.exe provided by MinGW
+ for nm in ('llvm-nm', 'nm'):
+ output, e = call_tool_nowarn(get_tool(nm) + ['--extern-only', '--defined-only',
+ '--format=posix', impfilename])
+ if output:
+ result = []
+ for line in output.split('\n'):
+ if ' T ' not in line or line.startswith('.text'):
+ continue
+ result.append(line.split(maxsplit=1)[0])
+ return result, None
+ all_stderr += e
+ return ([], all_stderr)
+
+def windows_syms(impfilename: str, outfilename: str) -> None:
+ # Get the name of the library
+ result, e = _get_implib_dllname(impfilename)
+ if not result:
+ print_tool_warning(['lib', 'llvm-lib', 'dlltool'], 'do not work or were not found', e)
+ dummy_syms(outfilename)
+ return
+ # Get a list of all symbols exported
+ symbols, e = _get_implib_exports(impfilename)
+ if not symbols:
+ print_tool_warning(['dumpbin', 'llvm-nm', 'nm'], 'do not work or were not found', e)
+ dummy_syms(outfilename)
+ return
+ result += symbols
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host: str) -> None:
+ if cross_host is not None:
+ # In case of cross builds just always relink. In theory we could
+ # determine the correct toolset, but we would need to use the correct
+ # `nm`, `readelf`, etc, from the cross info which requires refactoring.
+ dummy_syms(outfilename)
+ elif mesonlib.is_linux() or mesonlib.is_hurd():
+ gnu_syms(libfilename, outfilename)
+ elif mesonlib.is_osx():
+ osx_syms(libfilename, outfilename)
+ elif mesonlib.is_openbsd():
+ openbsd_syms(libfilename, outfilename)
+ elif mesonlib.is_freebsd():
+ freebsd_syms(libfilename, outfilename)
+ elif mesonlib.is_windows():
+ if os.path.isfile(impfilename):
+ windows_syms(impfilename, outfilename)
+ else:
+ # No import library. Not sure how the DLL is being used, so just
+ # rebuild everything that links to it every time.
+ dummy_syms(outfilename)
+ elif mesonlib.is_cygwin():
+ if os.path.isfile(impfilename):
+ cygwin_syms(impfilename, outfilename)
+ else:
+ # No import library. Not sure how the DLL is being used, so just
+ # rebuild everything that links to it every time.
+ dummy_syms(outfilename)
+ elif mesonlib.is_sunos():
+ solaris_syms(libfilename, outfilename)
+ else:
+ if not os.path.exists(TOOL_WARNING_FILE):
+        mlog.warning('Symbol extraction has not been implemented for this '
+                     'platform. ' + RELINKING_WARNING)
+ # Write it out so we don't warn again
+ with open(TOOL_WARNING_FILE, 'w', encoding='utf-8'):
+ pass
+ dummy_syms(outfilename)
+
+def run(args: T.List[str]) -> int:
+ global TOOL_WARNING_FILE
+ options = parser.parse_args(args)
+ if len(options.args) != 4:
+        print('symbolextractor.py <build dir> <shared library file> <import library> <output file>')
+ sys.exit(1)
+ privdir = os.path.join(options.args[0], 'meson-private')
+ TOOL_WARNING_FILE = os.path.join(privdir, 'symbolextractor_tool_warning_printed')
+ libfile = options.args[1]
+ impfile = options.args[2] # Only used on Windows
+ outfile = options.args[3]
+ gen_symbols(libfile, impfile, outfile, options.cross_host)
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/tags.py b/meson/mesonbuild/scripts/tags.py
new file mode 100644
index 000000000..9098efb39
--- /dev/null
+++ b/meson/mesonbuild/scripts/tags.py
@@ -0,0 +1,53 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+from pathlib import Path
+import typing as T
+
+def ls_as_bytestream() -> bytes:
+ if os.path.exists('.git'):
+ return subprocess.run(['git', 'ls-tree', '-r', '--name-only', 'HEAD'],
+ stdout=subprocess.PIPE).stdout
+
+ files = [str(p) for p in Path('.').glob('**/*')
+ if not p.is_dir() and
+ not next((x for x in p.parts if x.startswith('.')), None)]
+ return '\n'.join(files).encode()
+
+
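+# Each tool reads the file list from stdin: cscope expects quoted names via
+# '-i-', ctags reads the list with '-L-', and etags with '-'.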
+def cscope() -> int:
+ ls = b'\n'.join([b'"%s"' % f for f in ls_as_bytestream().split()])
+ return subprocess.run(['cscope', '-v', '-b', '-i-'], input=ls).returncode
+
+
+def ctags() -> int:
+ ls = ls_as_bytestream()
+ return subprocess.run(['ctags', '-L-'], input=ls).returncode
+
+
+def etags() -> int:
+ ls = ls_as_bytestream()
+ return subprocess.run(['etags', '-'], input=ls).returncode
+
+
+def run(args: T.List[str]) -> int:
+ tool_name = args[0]
+ srcdir_name = args[1]
+ os.chdir(srcdir_name)
+ assert tool_name in ['cscope', 'ctags', 'etags']
+ res = globals()[tool_name]()
+ assert isinstance(res, int)
+ return res
diff --git a/meson/mesonbuild/scripts/uninstall.py b/meson/mesonbuild/scripts/uninstall.py
new file mode 100644
index 000000000..f08490fbd
--- /dev/null
+++ b/meson/mesonbuild/scripts/uninstall.py
@@ -0,0 +1,50 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import typing as T
+
+logfile = 'meson-logs/install-log.txt'
+
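+# The install log lists one installed path per line; lines starting with '#'
+# are comments and are skipped below.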
+def do_uninstall(log: str) -> None:
+ failures = 0
+ successes = 0
+ for line in open(log, encoding='utf-8'):
+ if line.startswith('#'):
+ continue
+ fname = line.strip()
+ try:
+ if os.path.isdir(fname) and not os.path.islink(fname):
+ os.rmdir(fname)
+ else:
+ os.unlink(fname)
+ print('Deleted:', fname)
+ successes += 1
+ except Exception as e:
+ print(f'Could not delete {fname}: {e}.')
+ failures += 1
+ print('\nUninstall finished.\n')
+ print('Deleted:', successes)
+ print('Failed:', failures)
+ print('\nRemember that files created by custom scripts have not been removed.')
+
+def run(args: T.List[str]) -> int:
+ if args:
+        print('Uninstall script takes no arguments.')
+ return 1
+ if not os.path.exists(logfile):
+ print('Log file does not exist, no installation has been done.')
+ return 0
+ do_uninstall(logfile)
+ return 0
diff --git a/meson/mesonbuild/scripts/vcstagger.py b/meson/mesonbuild/scripts/vcstagger.py
new file mode 100644
index 000000000..18cf5f7f0
--- /dev/null
+++ b/meson/mesonbuild/scripts/vcstagger.py
@@ -0,0 +1,44 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys, os, subprocess, re
+import typing as T
+
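+# Runs `cmd` in source_dir, pulls the version string out of its output with
+# regex group 1 (falling back to `fallback` on any error), substitutes it for
+# replace_string in infile, and rewrites outfile only when the result changed.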
+def config_vcs_tag(infile: str, outfile: str, fallback: str, source_dir: str, replace_string: str, regex_selector: str, cmd: T.List[str]) -> None:
+ try:
+ output = subprocess.check_output(cmd, cwd=source_dir)
+ new_string = re.search(regex_selector, output.decode()).group(1).strip()
+ except Exception:
+ new_string = fallback
+
+ with open(infile, encoding='utf-8') as f:
+ new_data = f.read().replace(replace_string, new_string)
+ if os.path.exists(outfile):
+ with open(outfile, encoding='utf-8') as f:
+ needs_update = (f.read() != new_data)
+ else:
+ needs_update = True
+ if needs_update:
+ with open(outfile, 'w', encoding='utf-8') as f:
+ f.write(new_data)
+
+
+def run(args: T.List[str]) -> int:
+ infile, outfile, fallback, source_dir, replace_string, regex_selector = args[0:6]
+ command = args[6:]
+ config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, command)
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(run(sys.argv[1:]))
diff --git a/meson/mesonbuild/scripts/yelphelper.py b/meson/mesonbuild/scripts/yelphelper.py
new file mode 100644
index 000000000..374104bea
--- /dev/null
+++ b/meson/mesonbuild/scripts/yelphelper.py
@@ -0,0 +1,133 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import shutil
+import argparse
+from .. import mlog
+from ..mesonlib import has_path_sep
+from . import destdir_join
+from .gettext import read_linguas
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('command')
+parser.add_argument('--id', dest='project_id')
+parser.add_argument('--subdir', dest='subdir')
+parser.add_argument('--installdir', dest='install_dir')
+parser.add_argument('--sources', dest='sources')
+parser.add_argument('--media', dest='media', default='')
+parser.add_argument('--langs', dest='langs', default='')
+parser.add_argument('--symlinks', type=bool, dest='symlinks', default=False)
+
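+# Supported commands: 'pot' regenerates the translation template with itstool,
+# 'update-po' additionally merges it into each language's .po file, 'build'
+# compiles the .po files to .gmo, and 'install' builds, merges the translations
+# back into the pages and copies everything into the install tree.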
+def build_pot(srcdir: str, project_id: str, sources: T.List[str]) -> None:
+ # Must be relative paths
+ sources = [os.path.join('C', source) for source in sources]
+ outfile = os.path.join(srcdir, project_id + '.pot')
+ subprocess.call(['itstool', '-o', outfile] + sources)
+
+def update_po(srcdir: str, project_id: str, langs: T.List[str]) -> None:
+ potfile = os.path.join(srcdir, project_id + '.pot')
+ for lang in langs:
+ pofile = os.path.join(srcdir, lang, lang + '.po')
+ subprocess.call(['msgmerge', '-q', '-o', pofile, pofile, potfile])
+
+def build_translations(srcdir: str, blddir: str, langs: T.List[str]) -> None:
+ for lang in langs:
+ outdir = os.path.join(blddir, lang)
+ os.makedirs(outdir, exist_ok=True)
+ subprocess.call([
+ 'msgfmt', os.path.join(srcdir, lang, lang + '.po'),
+ '-o', os.path.join(outdir, lang + '.gmo')
+ ])
+
+def merge_translations(blddir: str, sources: T.List[str], langs: T.List[str]) -> None:
+ for lang in langs:
+ subprocess.call([
+ 'itstool', '-m', os.path.join(blddir, lang, lang + '.gmo'),
+ '-o', os.path.join(blddir, lang)
+ ] + sources)
+
+def install_help(srcdir: str, blddir: str, sources: T.List[str], media: T.List[str], langs: T.List[str], install_dir: str, destdir: str, project_id: str, symlinks: bool) -> None:
+ c_install_dir = os.path.join(install_dir, 'C', project_id)
+ for lang in langs + ['C']:
+ indir = destdir_join(destdir, os.path.join(install_dir, lang, project_id))
+ os.makedirs(indir, exist_ok=True)
+ for source in sources:
+ infile = os.path.join(srcdir if lang == 'C' else blddir, lang, source)
+ outfile = os.path.join(indir, source)
+ mlog.log(f'Installing {infile} to {outfile}')
+ shutil.copy2(infile, outfile)
+ for m in media:
+ infile = os.path.join(srcdir, lang, m)
+ outfile = os.path.join(indir, m)
+ c_infile = os.path.join(srcdir, 'C', m)
+ if not os.path.exists(infile):
+ if not os.path.exists(c_infile):
+                    mlog.warning('Media file "%s" does not exist in the C directory' % m)
+ continue
+ elif symlinks:
+ srcfile = os.path.join(c_install_dir, m)
+ mlog.log(f'Symlinking {outfile} to {srcfile}.')
+ if has_path_sep(m):
+ os.makedirs(os.path.dirname(outfile), exist_ok=True)
+ try:
+ try:
+ os.symlink(srcfile, outfile)
+ except FileExistsError:
+ os.remove(outfile)
+ os.symlink(srcfile, outfile)
+ continue
+ except (NotImplementedError, OSError):
+ mlog.warning('Symlinking not supported, falling back to copying')
+ infile = c_infile
+ else:
+                # Lang doesn't have this media file, so fall back to the 'C' one
+ infile = c_infile
+ mlog.log(f'Installing {infile} to {outfile}')
+ if has_path_sep(m):
+ os.makedirs(os.path.dirname(outfile), exist_ok=True)
+ shutil.copyfile(infile, outfile)
+ shutil.copystat(infile, outfile)
+
+def run(args: T.List[str]) -> int:
+ options = parser.parse_args(args)
+ langs = options.langs.split('@@') if options.langs else []
+ media = options.media.split('@@') if options.media else []
+ sources = options.sources.split('@@')
+ destdir = os.environ.get('DESTDIR', '')
+ src_subdir = os.path.join(os.environ['MESON_SOURCE_ROOT'], options.subdir)
+ build_subdir = os.path.join(os.environ['MESON_BUILD_ROOT'], options.subdir)
+ abs_sources = [os.path.join(src_subdir, 'C', source) for source in sources]
+
+ if not langs:
+ langs = read_linguas(src_subdir)
+
+ if options.command == 'pot':
+ build_pot(src_subdir, options.project_id, sources)
+ elif options.command == 'update-po':
+ build_pot(src_subdir, options.project_id, sources)
+ update_po(src_subdir, options.project_id, langs)
+ elif options.command == 'build':
+ if langs:
+ build_translations(src_subdir, build_subdir, langs)
+ elif options.command == 'install':
+ install_dir = os.path.join(os.environ['MESON_INSTALL_PREFIX'], options.install_dir)
+ if langs:
+ build_translations(src_subdir, build_subdir, langs)
+ merge_translations(build_subdir, abs_sources, langs)
+ install_help(src_subdir, build_subdir, sources, media, langs, install_dir,
+ destdir, options.project_id, options.symlinks)
+ return 0
diff --git a/meson/mesonbuild/templates/__init__.py b/meson/mesonbuild/templates/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/meson/mesonbuild/templates/__init__.py
diff --git a/meson/mesonbuild/templates/cpptemplates.py b/meson/mesonbuild/templates/cpptemplates.py
new file mode 100644
index 000000000..61c273745
--- /dev/null
+++ b/meson/mesonbuild/templates/cpptemplates.py
@@ -0,0 +1,185 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
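+# The templates below are Python format strings: braces that should appear
+# literally in the generated sources are doubled ({{ }}), single-brace fields
+# like {project_name} are filled in by str.format().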
+hello_cpp_template = '''#include <iostream>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+ if(argc != 1) {{
+ std::cout << argv[0] << "takes no arguments.\\n";
+ return 1;
+ }}
+ std::cout << "This is project " << PROJECT_NAME << ".\\n";
+ return 0;
+}}
+'''
+
+hello_cpp_meson_template = '''project('{project_name}', 'cpp',
+ version : '{version}',
+ default_options : ['warning_level=3',
+ 'cpp_std=c++14'])
+
+exe = executable('{exe_name}', '{source_name}',
+ install : true)
+
+test('basic', exe)
+'''
+
+lib_hpp_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+ #ifdef BUILDING_{utoken}
+ #define {utoken}_PUBLIC __declspec(dllexport)
+ #else
+ #define {utoken}_PUBLIC __declspec(dllimport)
+ #endif
+#else
+ #ifdef BUILDING_{utoken}
+ #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+ #else
+ #define {utoken}_PUBLIC
+ #endif
+#endif
+
+namespace {namespace} {{
+
+class {utoken}_PUBLIC {class_name} {{
+
+public:
+ {class_name}();
+ int get_number() const;
+
+private:
+
+ int number;
+
+}};
+
+}}
+
+'''
+
+lib_cpp_template = '''#include <{header_file}>
+
+namespace {namespace} {{
+
+{class_name}::{class_name}() {{
+ number = 6;
+}}
+
+int {class_name}::get_number() const {{
+ return number;
+}}
+
+}}
+'''
+
+lib_cpp_test_template = '''#include <{header_file}>
+#include <iostream>
+
+int main(int argc, char **argv) {{
+ if(argc != 1) {{
+ std::cout << argv[0] << " takes no arguments.\\n";
+ return 1;
+ }}
+ {namespace}::{class_name} c;
+ return c.get_number() != 6;
+}}
+'''
+
+lib_cpp_meson_template = '''project('{project_name}', 'cpp',
+ version : '{version}',
+ default_options : ['warning_level=3', 'cpp_std=c++14'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+ install : true,
+ cpp_args : lib_args,
+ gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+ link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+ include_directories: include_directories('.'),
+ link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+ name : '{project_name}',
+ filebase : '{ltoken}',
+ description : 'Meson sample project.',
+ subdirs : '{header_dir}',
+ libraries : shlib,
+ version : '{version}',
+)
+'''
+
+
+class CppProject(SampleImpl):
+ def __init__(self, options):
+ super().__init__()
+ self.name = options.name
+ self.version = options.version
+
+ def create_executable(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ source_name = lowercase_token + '.cpp'
+ open(source_name, 'w', encoding='utf-8').write(hello_cpp_template.format(project_name=self.name))
+ open('meson.build', 'w', encoding='utf-8').write(
+ hello_cpp_meson_template.format(project_name=self.name,
+ exe_name=lowercase_token,
+ source_name=source_name,
+ version=self.version))
+
+ def create_library(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ uppercase_token = lowercase_token.upper()
+ class_name = uppercase_token[0] + lowercase_token[1:]
+ test_exe_name = lowercase_token + '_test'
+ namespace = lowercase_token
+ lib_hpp_name = lowercase_token + '.hpp'
+ lib_cpp_name = lowercase_token + '.cpp'
+ test_cpp_name = lowercase_token + '_test.cpp'
+ kwargs = {'utoken': uppercase_token,
+ 'ltoken': lowercase_token,
+ 'header_dir': lowercase_token,
+ 'class_name': class_name,
+ 'namespace': namespace,
+ 'header_file': lib_hpp_name,
+ 'source_file': lib_cpp_name,
+ 'test_source_file': test_cpp_name,
+ 'test_exe_name': test_exe_name,
+ 'project_name': self.name,
+ 'lib_name': lowercase_token,
+ 'test_name': lowercase_token,
+ 'version': self.version,
+ }
+ open(lib_hpp_name, 'w', encoding='utf-8').write(lib_hpp_template.format(**kwargs))
+ open(lib_cpp_name, 'w', encoding='utf-8').write(lib_cpp_template.format(**kwargs))
+ open(test_cpp_name, 'w', encoding='utf-8').write(lib_cpp_test_template.format(**kwargs))
+ open('meson.build', 'w', encoding='utf-8').write(lib_cpp_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/cstemplates.py b/meson/mesonbuild/templates/cstemplates.py
new file mode 100644
index 000000000..bad7984e9
--- /dev/null
+++ b/meson/mesonbuild/templates/cstemplates.py
@@ -0,0 +1,134 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_cs_template = '''using System;
+
+public class {class_name} {{
+ const String PROJECT_NAME = "{project_name}";
+
+ static int Main(String[] args) {{
+ if (args.Length > 0) {{
+ System.Console.WriteLine(String.Format("{project_name} takes no arguments.."));
+ return 1;
+ }}
+ Console.WriteLine(String.Format("This is project {{0}}.", PROJECT_NAME));
+ return 0;
+ }}
+}}
+
+'''
+
+hello_cs_meson_template = '''project('{project_name}', 'cs',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+ install : true)
+
+test('basic', exe)
+'''
+
+lib_cs_template = '''
+public class {class_name} {{
+ private const int number = 6;
+
+ public int get_number() {{
+ return number;
+ }}
+}}
+
+'''
+
+lib_cs_test_template = '''using System;
+
+public class {class_test} {{
+ static int Main(String[] args) {{
+ if (args.Length > 0) {{
+ System.Console.WriteLine("{project_name} takes no arguments..");
+ return 1;
+ }}
+ {class_name} c = new {class_name}();
+ Boolean result = true;
+ return result.CompareTo(c.get_number() != 6);
+ }}
+}}
+
+'''
+
+lib_cs_meson_template = '''project('{project_name}', 'cs',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+stlib = shared_library('{lib_name}', '{source_file}',
+ install : true,
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+ link_with : stlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+ include_directories: include_directories('.'),
+ link_with : stlib)
+
+'''
+
+
+class CSharpProject(SampleImpl):
+ def __init__(self, options):
+ super().__init__()
+ self.name = options.name
+ self.version = options.version
+
+ def create_executable(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ uppercase_token = lowercase_token.upper()
+ class_name = uppercase_token[0] + lowercase_token[1:]
+ source_name = uppercase_token[0] + lowercase_token[1:] + '.cs'
+ open(source_name, 'w', encoding='utf-8').write(
+ hello_cs_template.format(project_name=self.name,
+ class_name=class_name))
+ open('meson.build', 'w', encoding='utf-8').write(
+ hello_cs_meson_template.format(project_name=self.name,
+ exe_name=self.name,
+ source_name=source_name,
+ version=self.version))
+
+ def create_library(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ uppercase_token = lowercase_token.upper()
+ class_name = uppercase_token[0] + lowercase_token[1:]
+ class_test = uppercase_token[0] + lowercase_token[1:] + '_test'
+ project_test = lowercase_token + '_test'
+ lib_cs_name = uppercase_token[0] + lowercase_token[1:] + '.cs'
+ test_cs_name = uppercase_token[0] + lowercase_token[1:] + '_test.cs'
+ kwargs = {'utoken': uppercase_token,
+ 'ltoken': lowercase_token,
+ 'class_test': class_test,
+ 'class_name': class_name,
+ 'source_file': lib_cs_name,
+ 'test_source_file': test_cs_name,
+ 'test_exe_name': project_test,
+ 'project_name': self.name,
+ 'lib_name': lowercase_token,
+ 'test_name': lowercase_token,
+ 'version': self.version,
+ }
+ open(lib_cs_name, 'w', encoding='utf-8').write(lib_cs_template.format(**kwargs))
+ open(test_cs_name, 'w', encoding='utf-8').write(lib_cs_test_template.format(**kwargs))
+ open('meson.build', 'w', encoding='utf-8').write(lib_cs_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/ctemplates.py b/meson/mesonbuild/templates/ctemplates.py
new file mode 100644
index 000000000..9b651bc8b
--- /dev/null
+++ b/meson/mesonbuild/templates/ctemplates.py
@@ -0,0 +1,166 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+ #ifdef BUILDING_{utoken}
+ #define {utoken}_PUBLIC __declspec(dllexport)
+ #else
+ #define {utoken}_PUBLIC __declspec(dllimport)
+ #endif
+#else
+ #ifdef BUILDING_{utoken}
+ #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+ #else
+ #define {utoken}_PUBLIC
+ #endif
+#endif
+
+int {utoken}_PUBLIC {function_name}();
+
+'''
+
+lib_c_template = '''#include <{header_file}>
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+int internal_function() {{
+ return 0;
+}}
+
+int {function_name}() {{
+ return internal_function();
+}}
+'''
+
+lib_c_test_template = '''#include <{header_file}>
+#include <stdio.h>
+
+int main(int argc, char **argv) {{
+ if(argc != 1) {{
+ printf("%s takes no arguments.\\n", argv[0]);
+ return 1;
+ }}
+ return {function_name}();
+}}
+'''
+
+lib_c_meson_template = '''project('{project_name}', 'c',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+ install : true,
+ c_args : lib_args,
+ gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+ link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+ include_directories: include_directories('.'),
+ link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+ name : '{project_name}',
+ filebase : '{ltoken}',
+ description : 'Meson sample project.',
+ subdirs : '{header_dir}',
+ libraries : shlib,
+ version : '{version}',
+)
+'''
+
+hello_c_template = '''#include <stdio.h>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+ if(argc != 1) {{
+ printf("%s takes no arguments.\\n", argv[0]);
+ return 1;
+ }}
+ printf("This is project %s.\\n", PROJECT_NAME);
+ return 0;
+}}
+'''
+
+hello_c_meson_template = '''project('{project_name}', 'c',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+ install : true)
+
+test('basic', exe)
+'''
+
+
+class CProject(SampleImpl):
+ def __init__(self, options):
+ super().__init__()
+ self.name = options.name
+ self.version = options.version
+
+ def create_executable(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ source_name = lowercase_token + '.c'
+ open(source_name, 'w', encoding='utf-8').write(hello_c_template.format(project_name=self.name))
+ open('meson.build', 'w', encoding='utf-8').write(
+ hello_c_meson_template.format(project_name=self.name,
+ exe_name=lowercase_token,
+ source_name=source_name,
+ version=self.version))
+
+ def create_library(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ uppercase_token = lowercase_token.upper()
+ function_name = lowercase_token[0:3] + '_func'
+ test_exe_name = lowercase_token + '_test'
+ lib_h_name = lowercase_token + '.h'
+ lib_c_name = lowercase_token + '.c'
+ test_c_name = lowercase_token + '_test.c'
+ kwargs = {'utoken': uppercase_token,
+ 'ltoken': lowercase_token,
+ 'header_dir': lowercase_token,
+ 'function_name': function_name,
+ 'header_file': lib_h_name,
+ 'source_file': lib_c_name,
+ 'test_source_file': test_c_name,
+ 'test_exe_name': test_exe_name,
+ 'project_name': self.name,
+ 'lib_name': lowercase_token,
+ 'test_name': lowercase_token,
+ 'version': self.version,
+ }
+ open(lib_h_name, 'w', encoding='utf-8').write(lib_h_template.format(**kwargs))
+ open(lib_c_name, 'w', encoding='utf-8').write(lib_c_template.format(**kwargs))
+ open(test_c_name, 'w', encoding='utf-8').write(lib_c_test_template.format(**kwargs))
+ open('meson.build', 'w', encoding='utf-8').write(lib_c_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/cudatemplates.py b/meson/mesonbuild/templates/cudatemplates.py
new file mode 100644
index 000000000..919db21be
--- /dev/null
+++ b/meson/mesonbuild/templates/cudatemplates.py
@@ -0,0 +1,185 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_cuda_template = '''#include <iostream>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+ if(argc != 1) {{
+ std::cout << argv[0] << "takes no arguments.\\n";
+ return 1;
+ }}
+ std::cout << "This is project " << PROJECT_NAME << ".\\n";
+ return 0;
+}}
+'''
+
+hello_cuda_meson_template = '''project('{project_name}', ['cuda', 'cpp'],
+ version : '{version}',
+ default_options : ['warning_level=3',
+ 'cpp_std=c++14'])
+
+exe = executable('{exe_name}', '{source_name}',
+ install : true)
+
+test('basic', exe)
+'''
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+ #ifdef BUILDING_{utoken}
+ #define {utoken}_PUBLIC __declspec(dllexport)
+ #else
+ #define {utoken}_PUBLIC __declspec(dllimport)
+ #endif
+#else
+ #ifdef BUILDING_{utoken}
+ #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+ #else
+ #define {utoken}_PUBLIC
+ #endif
+#endif
+
+namespace {namespace} {{
+
+class {utoken}_PUBLIC {class_name} {{
+
+public:
+ {class_name}();
+ int get_number() const;
+
+private:
+
+ int number;
+
+}};
+
+}}
+
+'''
+
+lib_cuda_template = '''#include <{header_file}>
+
+namespace {namespace} {{
+
+{class_name}::{class_name}() {{
+ number = 6;
+}}
+
+int {class_name}::get_number() const {{
+ return number;
+}}
+
+}}
+'''
+
+lib_cuda_test_template = '''#include <{header_file}>
+#include <iostream>
+
+int main(int argc, char **argv) {{
+ if(argc != 1) {{
+ std::cout << argv[0] << " takes no arguments.\\n";
+ return 1;
+ }}
+ {namespace}::{class_name} c;
+ return c.get_number() != 6;
+}}
+'''
+
+lib_cuda_meson_template = '''project('{project_name}', ['cuda', 'cpp'],
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+ install : true,
+ cpp_args : lib_args,
+ gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+ link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+ include_directories: include_directories('.'),
+ link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+ name : '{project_name}',
+ filebase : '{ltoken}',
+ description : 'Meson sample project.',
+ subdirs : '{header_dir}',
+ libraries : shlib,
+ version : '{version}',
+)
+'''
+
+
+class CudaProject(SampleImpl):
+ def __init__(self, options):
+ super().__init__()
+ self.name = options.name
+ self.version = options.version
+
+ def create_executable(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ source_name = lowercase_token + '.cu'
+ open(source_name, 'w', encoding='utf-8').write(hello_cuda_template.format(project_name=self.name))
+ open('meson.build', 'w', encoding='utf-8').write(
+ hello_cuda_meson_template.format(project_name=self.name,
+ exe_name=lowercase_token,
+ source_name=source_name,
+ version=self.version))
+
+ def create_library(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ uppercase_token = lowercase_token.upper()
+ class_name = uppercase_token[0] + lowercase_token[1:]
+ test_exe_name = lowercase_token + '_test'
+ namespace = lowercase_token
+ lib_h_name = lowercase_token + '.h'
+ lib_cuda_name = lowercase_token + '.cu'
+ test_cuda_name = lowercase_token + '_test.cu'
+ kwargs = {'utoken': uppercase_token,
+ 'ltoken': lowercase_token,
+ 'header_dir': lowercase_token,
+ 'class_name': class_name,
+ 'namespace': namespace,
+ 'header_file': lib_h_name,
+ 'source_file': lib_cuda_name,
+ 'test_source_file': test_cuda_name,
+ 'test_exe_name': test_exe_name,
+ 'project_name': self.name,
+ 'lib_name': lowercase_token,
+ 'test_name': lowercase_token,
+ 'version': self.version,
+ }
+ open(lib_h_name, 'w', encoding='utf-8').write(lib_h_template.format(**kwargs))
+ open(lib_cuda_name, 'w', encoding='utf-8').write(lib_cuda_template.format(**kwargs))
+ open(test_cuda_name, 'w', encoding='utf-8').write(lib_cuda_test_template.format(**kwargs))
+ open('meson.build', 'w', encoding='utf-8').write(lib_cuda_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/dlangtemplates.py b/meson/mesonbuild/templates/dlangtemplates.py
new file mode 100644
index 000000000..3d939d885
--- /dev/null
+++ b/meson/mesonbuild/templates/dlangtemplates.py
@@ -0,0 +1,143 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_d_template = '''module main;
+import std.stdio;
+
+enum PROJECT_NAME = "{project_name}";
+
+int main(string[] args) {{
+ if (args.length != 1){{
+ writefln("%s takes no arguments.\\n", args[0]);
+ return 1;
+ }}
+ writefln("This is project %s.\\n", PROJECT_NAME);
+ return 0;
+}}
+'''
+
+hello_d_meson_template = '''project('{project_name}', 'd',
+ version : '{version}',
+ default_options: ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+ install : true)
+
+test('basic', exe)
+'''
+
+lib_d_template = '''module {module_file};
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+int internal_function() {{
+ return 0;
+}}
+
+int {function_name}() {{
+ return internal_function();
+}}
+'''
+
+lib_d_test_template = '''module {module_file}_test;
+import std.stdio;
+import {module_file};
+
+
+int main(string[] args) {{
+ if (args.length != 1){{
+ writefln("%s takes no arguments.\\n", args[0]);
+ return 1;
+ }}
+ return {function_name}();
+}}
+'''
+
+lib_d_meson_template = '''project('{project_name}', 'd',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+stlib = static_library('{lib_name}', '{source_file}',
+ install : true,
+ gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+ link_with : stlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+ include_directories: include_directories('.'),
+ link_with : stlib)
+
+# Make this library usable from the Dlang
+# build system.
+dlang_mod = import('dlang')
+if find_program('dub', required: false).found()
+ dlang_mod.generate_dub_file(meson.project_name().to_lower(), meson.source_root(),
+ name : meson.project_name(),
+ license: meson.project_license(),
+ sourceFiles : '{source_file}',
+ description : 'Meson sample project.',
+ version : '{version}',
+ )
+endif
+'''
+
+
+class DlangProject(SampleImpl):
+ def __init__(self, options):
+ super().__init__()
+ self.name = options.name
+ self.version = options.version
+
+ def create_executable(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ source_name = lowercase_token + '.d'
+ open(source_name, 'w', encoding='utf-8').write(hello_d_template.format(project_name=self.name))
+ open('meson.build', 'w', encoding='utf-8').write(
+ hello_d_meson_template.format(project_name=self.name,
+ exe_name=lowercase_token,
+ source_name=source_name,
+ version=self.version))
+
+ def create_library(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ uppercase_token = lowercase_token.upper()
+ function_name = lowercase_token[0:3] + '_func'
+ test_exe_name = lowercase_token + '_test'
+ lib_m_name = lowercase_token
+ lib_d_name = lowercase_token + '.d'
+ test_d_name = lowercase_token + '_test.d'
+ kwargs = {'utoken': uppercase_token,
+ 'ltoken': lowercase_token,
+ 'header_dir': lowercase_token,
+ 'function_name': function_name,
+ 'module_file': lib_m_name,
+ 'source_file': lib_d_name,
+ 'test_source_file': test_d_name,
+ 'test_exe_name': test_exe_name,
+ 'project_name': self.name,
+ 'lib_name': lowercase_token,
+ 'test_name': lowercase_token,
+ 'version': self.version,
+ }
+ open(lib_d_name, 'w', encoding='utf-8').write(lib_d_template.format(**kwargs))
+ open(test_d_name, 'w', encoding='utf-8').write(lib_d_test_template.format(**kwargs))
+ open('meson.build', 'w', encoding='utf-8').write(lib_d_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/fortrantemplates.py b/meson/mesonbuild/templates/fortrantemplates.py
new file mode 100644
index 000000000..8fc1bca35
--- /dev/null
+++ b/meson/mesonbuild/templates/fortrantemplates.py
@@ -0,0 +1,140 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+lib_fortran_template = '''
+! This procedure will not be exported and is not
+! directly callable by users of this library.
+
+module modfoo
+
+implicit none
+private
+public :: {function_name}
+
+contains
+
+integer function internal_function()
+ internal_function = 0
+end function internal_function
+
+integer function {function_name}()
+ {function_name} = internal_function()
+end function {function_name}
+
+end module modfoo
+'''
+
+lib_fortran_test_template = '''
+use modfoo
+
+print *,{function_name}()
+
+end program
+'''
+
+lib_fortran_meson_template = '''project('{project_name}', 'fortran',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+ install : true,
+ fortran_args : lib_args,
+ gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+ link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+ include_directories: include_directories('.'),
+ link_with : shlib)
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+ name : '{project_name}',
+ filebase : '{ltoken}',
+ description : 'Meson sample project.',
+ subdirs : '{header_dir}',
+ libraries : shlib,
+ version : '{version}',
+)
+'''
+
+hello_fortran_template = '''
+implicit none
+
+character(len=*), parameter :: PROJECT_NAME = "{project_name}"
+
+print *,"This is project ", PROJECT_NAME
+
+end program
+'''
+
+hello_fortran_meson_template = '''project('{project_name}', 'fortran',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+ install : true)
+
+test('basic', exe)
+'''
+
+
+class FortranProject(SampleImpl):
+ def __init__(self, options):
+ super().__init__()
+ self.name = options.name
+ self.version = options.version
+
+ def create_executable(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ source_name = lowercase_token + '.f90'
+ open(source_name, 'w', encoding='utf-8').write(hello_fortran_template.format(project_name=self.name))
+ open('meson.build', 'w', encoding='utf-8').write(
+ hello_fortran_meson_template.format(project_name=self.name,
+ exe_name=lowercase_token,
+ source_name=source_name,
+ version=self.version))
+
+ def create_library(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ uppercase_token = lowercase_token.upper()
+ function_name = lowercase_token[0:3] + '_func'
+ test_exe_name = lowercase_token + '_test'
+ lib_fortran_name = lowercase_token + '.f90'
+ test_fortran_name = lowercase_token + '_test.f90'
+ kwargs = {'utoken': uppercase_token,
+ 'ltoken': lowercase_token,
+ 'header_dir': lowercase_token,
+ 'function_name': function_name,
+ 'source_file': lib_fortran_name,
+ 'test_source_file': test_fortran_name,
+ 'test_exe_name': test_exe_name,
+ 'project_name': self.name,
+ 'lib_name': lowercase_token,
+ 'test_name': lowercase_token,
+ 'version': self.version,
+ }
+ open(lib_fortran_name, 'w', encoding='utf-8').write(lib_fortran_template.format(**kwargs))
+ open(test_fortran_name, 'w', encoding='utf-8').write(lib_fortran_test_template.format(**kwargs))
+ open('meson.build', 'w', encoding='utf-8').write(lib_fortran_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/javatemplates.py b/meson/mesonbuild/templates/javatemplates.py
new file mode 100644
index 000000000..e4329611d
--- /dev/null
+++ b/meson/mesonbuild/templates/javatemplates.py
@@ -0,0 +1,136 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_java_template = '''
+
+public class {class_name} {{
+ final static String PROJECT_NAME = "{project_name}";
+
+ public static void main (String args[]) {{
+ if(args.length != 0) {{
+ System.out.println(args + " takes no arguments.");
+ System.exit(0);
+ }}
+ System.out.println("This is project " + PROJECT_NAME + ".");
+ System.exit(0);
+ }}
+}}
+
+'''
+
+hello_java_meson_template = '''project('{project_name}', 'java',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+exe = jar('{exe_name}', '{source_name}',
+ main_class : '{exe_name}',
+ install : true)
+
+test('basic', exe)
+'''
+
+lib_java_template = '''
+
+public class {class_name} {{
+ final static int number = 6;
+
+ public final int get_number() {{
+ return number;
+ }}
+}}
+
+'''
+
+lib_java_test_template = '''
+
+public class {class_test} {{
+ public static void main (String args[]) {{
+ if(args.length != 0) {{
+ System.out.println(args + " takes no arguments.");
+ System.exit(1);
+ }}
+
+ {class_name} c = new {class_name}();
+ Boolean result = true;
+ System.exit(result.compareTo(c.get_number() == 6));
+ }}
+}}
+
+'''
+
+lib_java_meson_template = '''project('{project_name}', 'java',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+jarlib = jar('{class_name}', '{source_file}',
+ main_class : '{class_name}',
+ install : true,
+)
+
+test_jar = jar('{class_test}', '{test_source_file}',
+ main_class : '{class_test}',
+ link_with : jarlib)
+test('{test_name}', test_jar)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+ include_directories: include_directories('.'),
+ link_with : jarlib)
+'''
+
+
+class JavaProject(SampleImpl):
+ def __init__(self, options):
+ super().__init__()
+ self.name = options.name
+ self.version = options.version
+
+ def create_executable(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ uppercase_token = lowercase_token.upper()
+ class_name = uppercase_token[0] + lowercase_token[1:]
+ source_name = uppercase_token[0] + lowercase_token[1:] + '.java'
+ open(source_name, 'w', encoding='utf-8').write(
+ hello_java_template.format(project_name=self.name,
+ class_name=class_name))
+ open('meson.build', 'w', encoding='utf-8').write(
+ hello_java_meson_template.format(project_name=self.name,
+ exe_name=class_name,
+ source_name=source_name,
+ version=self.version))
+
+ def create_library(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ uppercase_token = lowercase_token.upper()
+ class_name = uppercase_token[0] + lowercase_token[1:]
+ class_test = uppercase_token[0] + lowercase_token[1:] + '_test'
+ lib_java_name = uppercase_token[0] + lowercase_token[1:] + '.java'
+ test_java_name = uppercase_token[0] + lowercase_token[1:] + '_test.java'
+ kwargs = {'utoken': uppercase_token,
+ 'ltoken': lowercase_token,
+ 'class_test': class_test,
+ 'class_name': class_name,
+ 'source_file': lib_java_name,
+ 'test_source_file': test_java_name,
+ 'project_name': self.name,
+ 'lib_name': lowercase_token,
+ 'test_name': lowercase_token,
+ 'version': self.version,
+ }
+ open(lib_java_name, 'w', encoding='utf-8').write(lib_java_template.format(**kwargs))
+ open(test_java_name, 'w', encoding='utf-8').write(lib_java_test_template.format(**kwargs))
+ open('meson.build', 'w', encoding='utf-8').write(lib_java_meson_template.format(**kwargs))
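For reference, the class-name derivation above, worked through for a hypothetical project called 'my lib' (values follow directly from the code):

    lowercase_token = 'my_lib'        # re.sub(r'[^a-z0-9]', '_', 'my lib')
    class_name      = 'My_lib'        # uppercase_token[0] + lowercase_token[1:]
    source_name     = 'My_lib.java'   # Java requires the file name to match the public class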
diff --git a/meson/mesonbuild/templates/mesontemplates.py b/meson/mesonbuild/templates/mesontemplates.py
new file mode 100644
index 000000000..a29ac6fdc
--- /dev/null
+++ b/meson/mesonbuild/templates/mesontemplates.py
@@ -0,0 +1,75 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+
+meson_executable_template = '''project('{project_name}', '{language}',
+ version : '{version}',
+ default_options : [{default_options}])
+
+executable('{executable}',
+ {sourcespec},{depspec}
+ install : true)
+'''
+
+
+meson_jar_template = '''project('{project_name}', '{language}',
+ version : '{version}',
+ default_options : [{default_options}])
+
+jar('{executable}',
+ {sourcespec},{depspec}
+ main_class: '{main_class}',
+ install : true)
+'''
+
+
+def create_meson_build(options: argparse.Namespace) -> None:
+ if options.type != 'executable':
+ raise SystemExit('\nGenerating a meson.build file from existing sources is\n'
+ 'supported only for project type "executable".\n'
+ 'Run meson init in an empty directory to create a sample project.')
+ default_options = ['warning_level=3']
+ if options.language == 'cpp':
+ # This shows how to set this very common option.
+ default_options += ['cpp_std=c++14']
+ # If we get a meson.build autoformatter one day, this code could
+ # be simplified quite a bit.
+ formatted_default_options = ', '.join(f"'{x}'" for x in default_options)
+ sourcespec = ',\n '.join(f"'{x}'" for x in options.srcfiles)
+ depspec = ''
+ if options.deps:
+ depspec = '\n dependencies : [\n '
+ depspec += ',\n '.join(f"dependency('{x}')"
+ for x in options.deps.split(','))
+ depspec += '],'
+ if options.language != 'java':
+ content = meson_executable_template.format(project_name=options.name,
+ language=options.language,
+ version=options.version,
+ executable=options.executable,
+ sourcespec=sourcespec,
+ depspec=depspec,
+ default_options=formatted_default_options)
+ else:
+ content = meson_jar_template.format(project_name=options.name,
+ language=options.language,
+ version=options.version,
+ executable=options.executable,
+ main_class=options.name,
+ sourcespec=sourcespec,
+ depspec=depspec,
+ default_options=formatted_default_options)
+ open('meson.build', 'w', encoding='utf-8').write(content)
+ print('Generated meson.build file:\n\n' + content)
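To make the string assembly above concrete, here is roughly what create_meson_build emits for a hypothetical invocation with name='demo', language='cpp', version='0.1', srcfiles=['main.cpp'] and deps='zlib':

    project('demo', 'cpp',
      version : '0.1',
      default_options : ['warning_level=3', 'cpp_std=c++14'])

    executable('demo',
               'main.cpp',
      dependencies : [
        dependency('zlib')],
      install : true)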
diff --git a/meson/mesonbuild/templates/objcpptemplates.py b/meson/mesonbuild/templates/objcpptemplates.py
new file mode 100644
index 000000000..4f61d6c60
--- /dev/null
+++ b/meson/mesonbuild/templates/objcpptemplates.py
@@ -0,0 +1,167 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+ #ifdef BUILDING_{utoken}
+ #define {utoken}_PUBLIC __declspec(dllexport)
+ #else
+ #define {utoken}_PUBLIC __declspec(dllimport)
+ #endif
+#else
+ #ifdef BUILDING_{utoken}
+ #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+ #else
+ #define {utoken}_PUBLIC
+ #endif
+#endif
+
+int {utoken}_PUBLIC {function_name}();
+
+'''
+
+lib_objcpp_template = '''#import <{header_file}>
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+int internal_function() {{
+ return 0;
+}}
+
+int {function_name}() {{
+ return internal_function();
+}}
+'''
+
+lib_objcpp_test_template = '''#import <{header_file}>
+#import <iostream>
+
+int main(int argc, char **argv) {{
+ if(argc != 1) {{
+ std::cout << argv[0] << " takes no arguments." << std::endl;
+ return 1;
+ }}
+ return {function_name}();
+}}
+'''
+
+lib_objcpp_meson_template = '''project('{project_name}', 'objcpp',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+ install : true,
+ objcpp_args : lib_args,
+ gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+ link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+ include_directories: include_directories('.'),
+ link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+ name : '{project_name}',
+ filebase : '{ltoken}',
+ description : 'Meson sample project.',
+ subdirs : '{header_dir}',
+ libraries : shlib,
+ version : '{version}',
+)
+'''
+
+hello_objcpp_template = '''#import <iostream>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+ if(argc != 1) {{
+ std::cout << argv[0] << " takes no arguments." << std::endl;
+ return 1;
+ }}
+ std::cout << "This is project " << PROJECT_NAME << "." << std::endl;
+ return 0;
+}}
+'''
+
+hello_objcpp_meson_template = '''project('{project_name}', 'objcpp',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+ install : true)
+
+test('basic', exe)
+'''
+
+
+class ObjCppProject(SampleImpl):
+ def __init__(self, options):
+ super().__init__()
+ self.name = options.name
+ self.version = options.version
+
+ def create_executable(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ source_name = lowercase_token + '.mm'
+ open(source_name, 'w', encoding='utf-8').write(hello_objcpp_template.format(project_name=self.name))
+ open('meson.build', 'w', encoding='utf-8').write(
+ hello_objcpp_meson_template.format(project_name=self.name,
+ exe_name=lowercase_token,
+ source_name=source_name,
+ version=self.version))
+
+ def create_library(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ uppercase_token = lowercase_token.upper()
+ function_name = lowercase_token[0:3] + '_func'
+ test_exe_name = lowercase_token + '_test'
+ lib_h_name = lowercase_token + '.h'
+ lib_objcpp_name = lowercase_token + '.mm'
+ test_objcpp_name = lowercase_token + '_test.mm'
+ kwargs = {'utoken': uppercase_token,
+ 'ltoken': lowercase_token,
+ 'header_dir': lowercase_token,
+ 'function_name': function_name,
+ 'header_file': lib_h_name,
+ 'source_file': lib_objcpp_name,
+ 'test_source_file': test_objcpp_name,
+ 'test_exe_name': test_exe_name,
+ 'project_name': self.name,
+ 'lib_name': lowercase_token,
+ 'test_name': lowercase_token,
+ 'version': self.version,
+ }
+ open(lib_h_name, 'w', encoding='utf-8').write(lib_h_template.format(**kwargs))
+ open(lib_objcpp_name, 'w', encoding='utf-8').write(lib_objcpp_template.format(**kwargs))
+ open(test_objcpp_name, 'w', encoding='utf-8').write(lib_objcpp_test_template.format(**kwargs))
+ open('meson.build', 'w', encoding='utf-8').write(lib_objcpp_meson_template.format(**kwargs))
+
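A note on lib_h_template above: it is the usual cross-platform export-macro header, and it only works because the generated meson.build passes -DBUILDING_{utoken} when compiling the library itself. A minimal sketch of the instantiation; the token and function name are hypothetical:

    # print(lib_h_template.format(utoken='FOO', function_name='foo_func'))
    #
    # With -DBUILDING_FOO (set via lib_args for the library target only):
    #   Windows: FOO_PUBLIC -> __declspec(dllexport); consumers get dllimport.
    #   ELF:     FOO_PUBLIC -> __attribute__ ((visibility ("default"))),
    #            pairing with gnu_symbol_visibility : 'hidden' so that only
    #            foo_func is exported; consumers get an empty macro.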
diff --git a/meson/mesonbuild/templates/objctemplates.py b/meson/mesonbuild/templates/objctemplates.py
new file mode 100644
index 000000000..dac638d14
--- /dev/null
+++ b/meson/mesonbuild/templates/objctemplates.py
@@ -0,0 +1,166 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+ #ifdef BUILDING_{utoken}
+ #define {utoken}_PUBLIC __declspec(dllexport)
+ #else
+ #define {utoken}_PUBLIC __declspec(dllimport)
+ #endif
+#else
+ #ifdef BUILDING_{utoken}
+ #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+ #else
+ #define {utoken}_PUBLIC
+ #endif
+#endif
+
+int {utoken}_PUBLIC {function_name}();
+
+'''
+
+lib_objc_template = '''#import <{header_file}>
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+int internal_function() {{
+ return 0;
+}}
+
+int {function_name}() {{
+ return internal_function();
+}}
+'''
+
+lib_objc_test_template = '''#import <{header_file}>
+#import <stdio.h>
+
+int main(int argc, char **argv) {{
+ if(argc != 1) {{
+ printf("%s takes no arguments.\\n", argv[0]);
+ return 1;
+ }}
+ return {function_name}();
+}}
+'''
+
+lib_objc_meson_template = '''project('{project_name}', 'objc',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+ install : true,
+ objc_args : lib_args,
+ gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+ link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+ include_directories: include_directories('.'),
+ link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+ name : '{project_name}',
+ filebase : '{ltoken}',
+ description : 'Meson sample project.',
+ subdirs : '{header_dir}',
+ libraries : shlib,
+ version : '{version}',
+)
+'''
+
+hello_objc_template = '''#import <stdio.h>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+ if(argc != 1) {{
+ printf("%s takes no arguments.\\n", argv[0]);
+ return 1;
+ }}
+ printf("This is project %s.\\n", PROJECT_NAME);
+ return 0;
+}}
+'''
+
+hello_objc_meson_template = '''project('{project_name}', 'objc',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+ install : true)
+
+test('basic', exe)
+'''
+
+
+class ObjCProject(SampleImpl):
+ def __init__(self, options):
+ super().__init__()
+ self.name = options.name
+ self.version = options.version
+
+ def create_executable(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ source_name = lowercase_token + '.m'
+ open(source_name, 'w', encoding='utf-8').write(hello_objc_template.format(project_name=self.name))
+ open('meson.build', 'w', encoding='utf-8').write(
+ hello_objc_meson_template.format(project_name=self.name,
+ exe_name=lowercase_token,
+ source_name=source_name,
+ version=self.version))
+
+ def create_library(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ uppercase_token = lowercase_token.upper()
+ function_name = lowercase_token[0:3] + '_func'
+ test_exe_name = lowercase_token + '_test'
+ lib_h_name = lowercase_token + '.h'
+ lib_objc_name = lowercase_token + '.m'
+ test_objc_name = lowercase_token + '_test.m'
+ kwargs = {'utoken': uppercase_token,
+ 'ltoken': lowercase_token,
+ 'header_dir': lowercase_token,
+ 'function_name': function_name,
+ 'header_file': lib_h_name,
+ 'source_file': lib_objc_name,
+ 'test_source_file': test_objc_name,
+ 'test_exe_name': test_exe_name,
+ 'project_name': self.name,
+ 'lib_name': lowercase_token,
+ 'test_name': lowercase_token,
+ 'version': self.version,
+ }
+ open(lib_h_name, 'w', encoding='utf-8').write(lib_h_template.format(**kwargs))
+ open(lib_objc_name, 'w', encoding='utf-8').write(lib_objc_template.format(**kwargs))
+ open(test_objc_name, 'w', encoding='utf-8').write(lib_objc_test_template.format(**kwargs))
+ open('meson.build', 'w', encoding='utf-8').write(lib_objc_meson_template.format(**kwargs))
diff --git a/meson/mesonbuild/templates/rusttemplates.py b/meson/mesonbuild/templates/rusttemplates.py
new file mode 100644
index 000000000..95a937cca
--- /dev/null
+++ b/meson/mesonbuild/templates/rusttemplates.py
@@ -0,0 +1,113 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+lib_rust_template = '''#![crate_name = "{crate_file}"]
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+fn internal_function() -> i32 {{
+ return 0;
+}}
+
+pub fn {function_name}() -> i32 {{
+ return internal_function();
+}}
+'''
+
+lib_rust_test_template = '''extern crate {crate_file};
+
+fn main() {{
+ println!("printing: {{}}", {crate_file}::{function_name}());
+}}
+'''
+
+
+lib_rust_meson_template = '''project('{project_name}', 'rust',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+shlib = static_library('{lib_name}', '{source_file}', install : true)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+ link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+ include_directories: include_directories('.'),
+ link_with : shlib)
+'''
+
+hello_rust_template = '''
+fn main() {{
+ let project_name = "{project_name}";
+ println!("This is project {{}}.\\n", project_name);
+}}
+'''
+
+hello_rust_meson_template = '''project('{project_name}', 'rust',
+ version : '{version}',
+ default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+ install : true)
+
+test('basic', exe)
+'''
+
+
+class RustProject(SampleImpl):
+ def __init__(self, options):
+ super().__init__()
+ self.name = options.name
+ self.version = options.version
+
+ def create_executable(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ source_name = lowercase_token + '.rs'
+ open(source_name, 'w', encoding='utf-8').write(hello_rust_template.format(project_name=self.name))
+ open('meson.build', 'w', encoding='utf-8').write(
+ hello_rust_meson_template.format(project_name=self.name,
+ exe_name=lowercase_token,
+ source_name=source_name,
+ version=self.version))
+
+ def create_library(self) -> None:
+ lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+ uppercase_token = lowercase_token.upper()
+ function_name = lowercase_token[0:3] + '_func'
+ test_exe_name = lowercase_token + '_test'
+ lib_crate_name = lowercase_token
+ lib_rs_name = lowercase_token + '.rs'
+ test_rs_name = lowercase_token + '_test.rs'
+ kwargs = {'utoken': uppercase_token,
+ 'ltoken': lowercase_token,
+ 'header_dir': lowercase_token,
+ 'function_name': function_name,
+ 'crate_file': lib_crate_name,
+ 'source_file': lib_rs_name,
+ 'test_source_file': test_rs_name,
+ 'test_exe_name': test_exe_name,
+ 'project_name': self.name,
+ 'lib_name': lowercase_token,
+ 'test_name': lowercase_token,
+ 'version': self.version,
+ }
+ open(lib_rs_name, 'w', encoding='utf-8').write(lib_rust_template.format(**kwargs))
+ open(test_rs_name, 'w', encoding='utf-8').write(lib_rust_test_template.format(**kwargs))
+ open('meson.build', 'w', encoding='utf-8').write(lib_rust_meson_template.format(**kwargs))
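How the generated Rust pieces fit together for a hypothetical project named 'demo' (all names computed by create_library above):

    # demo.rs      -> #![crate_name = "demo"], exporting pub fn dem_func() -> i32
    # demo_test.rs -> extern crate demo; prints demo::dem_func()
    # meson.build  -> static_library('demo', 'demo.rs') plus a test executable
    #                 linked against it; note the variable is named shlib even
    #                 though this template builds a static library.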
diff --git a/meson/mesonbuild/templates/samplefactory.py b/meson/mesonbuild/templates/samplefactory.py
new file mode 100644
index 000000000..1192e138a
--- /dev/null
+++ b/meson/mesonbuild/templates/samplefactory.py
@@ -0,0 +1,40 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from mesonbuild.templates.fortrantemplates import FortranProject
+from mesonbuild.templates.objcpptemplates import ObjCppProject
+from mesonbuild.templates.dlangtemplates import DlangProject
+from mesonbuild.templates.rusttemplates import RustProject
+from mesonbuild.templates.javatemplates import JavaProject
+from mesonbuild.templates.cudatemplates import CudaProject
+from mesonbuild.templates.objctemplates import ObjCProject
+from mesonbuild.templates.cpptemplates import CppProject
+from mesonbuild.templates.cstemplates import CSharpProject
+from mesonbuild.templates.ctemplates import CProject
+from mesonbuild.templates.sampleimpl import SampleImpl
+
+import argparse
+
+def sample_generator(options: argparse.Namespace) -> SampleImpl:
+ return {
+ 'c': CProject,
+ 'cpp': CppProject,
+ 'cs': CSharpProject,
+ 'cuda': CudaProject,
+ 'objc': ObjCProject,
+ 'objcpp': ObjCppProject,
+ 'java': JavaProject,
+ 'd': DlangProject,
+ 'rust': RustProject,
+ 'fortran': FortranProject
+ }[options.language](options)
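A sketch of how the factory is meant to be called; the Namespace fields are assumptions based on the template classes above (the real caller is meson's init command):

    import argparse
    from mesonbuild.templates.samplefactory import sample_generator

    options = argparse.Namespace(name='demo', version='0.1', language='c')
    impl = sample_generator(options)   # dispatches to CProject
    impl.create_executable()           # writes demo.c and meson.build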
diff --git a/meson/mesonbuild/templates/sampleimpl.py b/meson/mesonbuild/templates/sampleimpl.py
new file mode 100644
index 000000000..2d1498b83
--- /dev/null
+++ b/meson/mesonbuild/templates/sampleimpl.py
@@ -0,0 +1,21 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class SampleImpl:
+ def create_executable(self) -> None:
+ raise NotImplementedError('Sample implementation for "executable" not implemented!')
+
+ def create_library(self) -> None:
+ raise NotImplementedError('Sample implementation for "library" not implemented!')
diff --git a/meson/mesonbuild/wrap/__init__.py b/meson/mesonbuild/wrap/__init__.py
new file mode 100644
index 000000000..653f42ab9
--- /dev/null
+++ b/meson/mesonbuild/wrap/__init__.py
@@ -0,0 +1,59 @@
+from enum import Enum
+
+# Used for the --wrap-mode command-line argument
+#
+# Special wrap modes:
+# nofallback: Don't download wraps for dependency() fallbacks
+# nodownload: Don't download wraps for all subproject() calls
+#
+# subprojects are used for two purposes:
+# 1. To download and build dependencies by using .wrap
+# files if they are not provided by the system. This is
+# usually expressed via dependency(..., fallback: ...).
+# 2. To download and build 'copylibs' which are meant to be
+# used by copying into your project. This is always done
+# with an explicit subproject() call.
+#
+# --wrap-mode=nofallback will never do (1)
+# --wrap-mode=nodownload will do neither (1) nor (2)
+#
+# If you are building from a release tarball, you should be
+# able to safely use 'nodownload' since upstream is
+# expected to ship all required sources with the tarball.
+#
+# If you are building from a git repository, you will want
+# to use 'nofallback' so that any 'copylib' wraps will be
+# downloaded as subprojects.
+#
+# --wrap-mode=forcefallback will ignore external dependencies,
+# even if they match the version requirements, and automatically
+# use the fallback if one was provided. This is useful for example
+# to make sure a project builds when using the fallbacks.
+#
+# Note that these options do not affect subprojects that
+# are git submodules since those are only usable in git
+# repositories, and you almost always want to download them.
+
+# This did _not_ work when inside the WrapMode class.
+# I don't know why. If you can fix this, patches welcome.
+string_to_value = {'default': 1,
+ 'nofallback': 2,
+ 'nodownload': 3,
+ 'forcefallback': 4,
+ 'nopromote': 5,
+ }
+
+class WrapMode(Enum):
+ default = 1
+ nofallback = 2
+ nodownload = 3
+ forcefallback = 4
+ nopromote = 5
+
+ def __str__(self) -> str:
+ return self.name
+
+ @staticmethod
+ def from_string(mode_name: str) -> 'WrapMode':
+ g = string_to_value[mode_name]
+ return WrapMode(g)
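The mapping above round-trips between the --wrap-mode command-line string and the enum; a minimal sketch:

    from mesonbuild.wrap import WrapMode

    mode = WrapMode.from_string('nofallback')
    assert mode is WrapMode.nofallback
    assert str(mode) == 'nofallback'   # __str__ returns the member name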
diff --git a/meson/mesonbuild/wrap/wrap.py b/meson/mesonbuild/wrap/wrap.py
new file mode 100644
index 000000000..6c145ab6a
--- /dev/null
+++ b/meson/mesonbuild/wrap/wrap.py
@@ -0,0 +1,607 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import mlog
+import contextlib
+import urllib.request
+import urllib.error
+import urllib.parse
+import os
+import hashlib
+import shutil
+import tempfile
+import stat
+import subprocess
+import sys
+import configparser
+import typing as T
+import textwrap
+
+from pathlib import Path
+from . import WrapMode
+from .. import coredata
+from ..mesonlib import quiet_git, GIT, ProgressBar, MesonException
+from .. import mesonlib
+
+if T.TYPE_CHECKING:
+ import http.client
+
+try:
+ # Importing is just done to check if SSL exists, so all warnings
+ # regarding 'imported but unused' can be safely ignored
+ import ssl # noqa
+ has_ssl = True
+except ImportError:
+ has_ssl = False
+
+REQ_TIMEOUT = 600.0
+SSL_WARNING_PRINTED = False
+WHITELIST_SUBDOMAIN = 'wrapdb.mesonbuild.com'
+
+ALL_TYPES = ['file', 'git', 'hg', 'svn']
+
+def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult:
+ """ raises WrapException if not whitelisted subdomain """
+ url = urllib.parse.urlparse(urlstr)
+ if not url.hostname:
+ raise WrapException(f'{urlstr} is not a valid URL')
+ if not url.hostname.endswith(WHITELIST_SUBDOMAIN):
+ raise WrapException(f'{urlstr} is not a whitelisted WrapDB URL')
+ if has_ssl and not url.scheme == 'https':
+ raise WrapException(f'WrapDB URL did not use the expected https scheme: {urlstr}')
+ return url
+
+def open_wrapdburl(urlstring: str) -> 'http.client.HTTPResponse':
+ global SSL_WARNING_PRINTED
+
+ url = whitelist_wrapdb(urlstring)
+ if has_ssl:
+ try:
+ return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=REQ_TIMEOUT))
+ except urllib.error.URLError as excp:
+ raise WrapException(f'WrapDB connection failed to {urlstring} with error {excp}')
+
+ # following code is only for those without Python SSL
+ nossl_url = url._replace(scheme='http')
+ if not SSL_WARNING_PRINTED:
+ mlog.warning(f'SSL module not available in {sys.executable}: WrapDB traffic not authenticated.')
+ SSL_WARNING_PRINTED = True
+ try:
+ return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=REQ_TIMEOUT))
+ except urllib.error.URLError as excp:
+ raise WrapException(f'WrapDB connection failed to {urlstring} with error {excp}')
+
+
+class WrapException(MesonException):
+ pass
+
+class WrapNotFoundException(WrapException):
+ pass
+
+class PackageDefinition:
+ def __init__(self, fname: str):
+ self.filename = fname
+ self.type = None # type: T.Optional[str]
+ self.values = {} # type: T.Dict[str, str]
+ self.provided_deps = {} # type: T.Dict[str, T.Optional[str]]
+ self.provided_programs = [] # type: T.List[str]
+ self.basename = os.path.basename(fname)
+ self.has_wrap = self.basename.endswith('.wrap')
+ self.name = self.basename[:-5] if self.has_wrap else self.basename
+ self.directory = self.name
+ self.provided_deps[self.name] = None
+ # What the original file name was before redirection
+ self.original_filename = fname
+ self.redirected = False
+ if self.has_wrap:
+ self.parse_wrap()
+ self.directory = self.values.get('directory', self.name)
+ if os.path.dirname(self.directory):
+ raise WrapException('Directory key must be a name and not a path')
+ if self.type and self.type not in ALL_TYPES:
+ raise WrapException(f'Unknown wrap type {self.type!r}')
+ self.filesdir = os.path.join(os.path.dirname(self.filename), 'packagefiles')
+
+ def parse_wrap(self) -> None:
+ try:
+ config = configparser.ConfigParser(interpolation=None)
+ config.read(self.filename)
+ except configparser.Error as e:
+ raise WrapException('Failed to parse {}: {}'.format(self.basename, str(e)))
+ self.parse_wrap_section(config)
+ if self.type == 'redirect':
+ # [wrap-redirect] has a `filename` value pointing to the real wrap
+ # file we should parse instead. It must be relative to the current
+ # wrap file location and must be in the form foo/subprojects/bar.wrap.
+ dirname = Path(self.filename).parent
+ fname = Path(self.values['filename'])
+ for i, p in enumerate(fname.parts):
+ if i % 2 == 0:
+ if p == '..':
+ raise WrapException('wrap-redirect filename cannot contain ".."')
+ else:
+ if p != 'subprojects':
+ raise WrapException('wrap-redirect filename must be in the form foo/subprojects/bar.wrap')
+ if fname.suffix != '.wrap':
+ raise WrapException('wrap-redirect filename must be a .wrap file')
+ fname = dirname / fname
+ if not fname.is_file():
+ raise WrapException(f'wrap-redirect {fname} filename does not exist')
+ self.filename = str(fname)
+ self.parse_wrap()
+ self.redirected = True
+ return
+ self.parse_provide_section(config)
+
+ def parse_wrap_section(self, config: configparser.ConfigParser) -> None:
+ if len(config.sections()) < 1:
+ raise WrapException(f'Missing sections in {self.basename}')
+ self.wrap_section = config.sections()[0]
+ if not self.wrap_section.startswith('wrap-'):
+ m = '{!r} is not a valid first section in {}'
+ raise WrapException(m.format(self.wrap_section, self.basename))
+ self.type = self.wrap_section[5:]
+ self.values = dict(config[self.wrap_section])
+
+ def parse_provide_section(self, config: configparser.ConfigParser) -> None:
+ if config.has_section('provide'):
+ for k, v in config['provide'].items():
+ if k == 'dependency_names':
+ # A comma separated list of dependency names that does not
+ # need a variable name
+ names_dict = {n.strip(): None for n in v.split(',')}
+ self.provided_deps.update(names_dict)
+ continue
+ if k == 'program_names':
+ # A comma separated list of program names
+ names_list = [n.strip() for n in v.split(',')]
+ self.provided_programs += names_list
+ continue
+ if not v:
+ m = ('Empty dependency variable name for {!r} in {}. '
+ 'If the subproject uses meson.override_dependency() '
+ 'it can be added in the "dependency_names" special key.')
+ raise WrapException(m.format(k, self.basename))
+ self.provided_deps[k] = v
+
+ def get(self, key: str) -> str:
+ try:
+ return self.values[key]
+ except KeyError:
+ m = 'Missing key {!r} in {}'
+ raise WrapException(m.format(key, self.basename))
+
+def get_directory(subdir_root: str, packagename: str) -> str:
+ fname = os.path.join(subdir_root, packagename + '.wrap')
+ if os.path.isfile(fname):
+ wrap = PackageDefinition(fname)
+ return wrap.directory
+ return packagename
+
+def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool:
+ '''
+ Wrapper to convert GitException to WrapException caught in interpreter.
+ '''
+ try:
+ return mesonlib.verbose_git(cmd, workingdir, check=check)
+ except mesonlib.GitException as e:
+ raise WrapException(str(e))
+
+class Resolver:
+ def __init__(self, source_dir: str, subdir: str, wrap_mode: WrapMode = WrapMode.default) -> None:
+ self.source_dir = source_dir
+ self.subdir = subdir
+ self.wrap_mode = wrap_mode
+ self.subdir_root = os.path.join(source_dir, subdir)
+ self.cachedir = os.path.join(self.subdir_root, 'packagecache')
+ self.wraps = {} # type: T.Dict[str, PackageDefinition]
+ self.provided_deps = {} # type: T.Dict[str, PackageDefinition]
+ self.provided_programs = {} # type: T.Dict[str, PackageDefinition]
+ self.load_wraps()
+
+ def load_wraps(self) -> None:
+ if not os.path.isdir(self.subdir_root):
+ return
+ root, dirs, files = next(os.walk(self.subdir_root))
+ for i in files:
+ if not i.endswith('.wrap'):
+ continue
+ fname = os.path.join(self.subdir_root, i)
+ wrap = PackageDefinition(fname)
+ self.wraps[wrap.name] = wrap
+ if wrap.directory in dirs:
+ dirs.remove(wrap.directory)
+ # Add dummy package definition for directories not associated with a wrap file.
+ for i in dirs:
+ if i in ['packagecache', 'packagefiles']:
+ continue
+ fname = os.path.join(self.subdir_root, i)
+ wrap = PackageDefinition(fname)
+ self.wraps[wrap.name] = wrap
+
+ for wrap in self.wraps.values():
+ for k in wrap.provided_deps.keys():
+ if k in self.provided_deps:
+ prev_wrap = self.provided_deps[k]
+ m = 'Multiple wrap files provide {!r} dependency: {} and {}'
+ raise WrapException(m.format(k, wrap.basename, prev_wrap.basename))
+ self.provided_deps[k] = wrap
+ for k in wrap.provided_programs:
+ if k in self.provided_programs:
+ prev_wrap = self.provided_programs[k]
+ m = 'Multiple wrap files provide {!r} program: {} and {}'
+ raise WrapException(m.format(k, wrap.basename, prev_wrap.basename))
+ self.provided_programs[k] = wrap
+
+ def merge_wraps(self, other_resolver: 'Resolver') -> None:
+ for k, v in other_resolver.wraps.items():
+ self.wraps.setdefault(k, v)
+ for k, v in other_resolver.provided_deps.items():
+ self.provided_deps.setdefault(k, v)
+ for k, v in other_resolver.provided_programs.items():
+ self.provided_programs.setdefault(k, v)
+
+ def find_dep_provider(self, packagename: str) -> T.Tuple[T.Optional[str], T.Optional[str]]:
+ # Python's ini parser converts all key values to lowercase.
+ # Thus the query name must also be in lower case.
+ packagename = packagename.lower()
+ wrap = self.provided_deps.get(packagename)
+ if wrap:
+ dep_var = wrap.provided_deps.get(packagename)
+ return wrap.name, dep_var
+ return None, None
+
+ def get_varname(self, subp_name: str, depname: str) -> T.Optional[str]:
+ wrap = self.wraps.get(subp_name)
+ return wrap.provided_deps.get(depname) if wrap else None
+
+ def find_program_provider(self, names: T.List[str]) -> T.Optional[str]:
+ for name in names:
+ wrap = self.provided_programs.get(name)
+ if wrap:
+ return wrap.name
+ return None
+
+ def resolve(self, packagename: str, method: str, current_subproject: str = '') -> str:
+ self.current_subproject = current_subproject
+ self.packagename = packagename
+ self.directory = packagename
+ self.wrap = self.wraps.get(packagename)
+ if not self.wrap:
+ m = 'Neither a subproject directory nor a {}.wrap file was found.'
+ raise WrapNotFoundException(m.format(self.packagename))
+ self.directory = self.wrap.directory
+
+ if self.wrap.has_wrap:
+ # We have a .wrap file, source code will be placed into main
+ # project's subproject_dir even if the wrap file comes from another
+ # subproject.
+ self.dirname = os.path.join(self.subdir_root, self.directory)
+ # Check if the wrap comes from the main project.
+ main_fname = os.path.join(self.subdir_root, self.wrap.basename)
+ if self.wrap.filename != main_fname:
+ rel = os.path.relpath(self.wrap.filename, self.source_dir)
+ mlog.log('Using', mlog.bold(rel))
+ # Write a dummy wrap file in main project that redirect to the
+ # wrap we picked.
+ with open(main_fname, 'w', encoding='utf-8') as f:
+ f.write(textwrap.dedent('''\
+ [wrap-redirect]
+ filename = {}
+ '''.format(os.path.relpath(self.wrap.filename, self.subdir_root))))
+ else:
+ # No wrap file, it's a dummy package definition for an existing
+ # directory. Use the source code in place.
+ self.dirname = self.wrap.filename
+ rel_path = os.path.relpath(self.dirname, self.source_dir)
+
+ meson_file = os.path.join(self.dirname, 'meson.build')
+ cmake_file = os.path.join(self.dirname, 'CMakeLists.txt')
+
+ if method not in ['meson', 'cmake']:
+ raise WrapException('Only the methods "meson" and "cmake" are supported')
+
+ # The directory is there and has meson.build? Great, use it.
+ if method == 'meson' and os.path.exists(meson_file):
+ return rel_path
+ if method == 'cmake' and os.path.exists(cmake_file):
+ return rel_path
+
+ # Check if the subproject is a git submodule
+ self.resolve_git_submodule()
+
+ if os.path.exists(self.dirname):
+ if not os.path.isdir(self.dirname):
+ raise WrapException('Path already exists but is not a directory')
+ else:
+ if self.wrap.type == 'file':
+ self.get_file()
+ else:
+ self.check_can_download()
+ if self.wrap.type == 'git':
+ self.get_git()
+ elif self.wrap.type == "hg":
+ self.get_hg()
+ elif self.wrap.type == "svn":
+ self.get_svn()
+ else:
+ raise WrapException(f'Unknown wrap type {self.wrap.type!r}')
+ self.apply_patch()
+
+ # A meson.build or CMakeLists.txt file is required in the directory
+ if method == 'meson' and not os.path.exists(meson_file):
+ raise WrapException('Subproject exists but has no meson.build file')
+ if method == 'cmake' and not os.path.exists(cmake_file):
+ raise WrapException('Subproject exists but has no CMakeLists.txt file')
+
+ return rel_path
+
+ def check_can_download(self) -> None:
+ # Don't download subproject data based on wrap file if requested.
+ # Git submodules are ok (see above)!
+ if self.wrap_mode is WrapMode.nodownload:
+ m = 'Automatic wrap-based subproject downloading is disabled'
+ raise WrapException(m)
+
+ def resolve_git_submodule(self) -> bool:
+ # Is git installed? If not, we're probably not in a git repository and
+ # definitely cannot try to conveniently set up a submodule.
+ if not GIT:
+ return False
+ # Are we in a git repository?
+ ret, out = quiet_git(['rev-parse'], self.subdir_root)
+ if not ret:
+ return False
+ # Is `dirname` a submodule?
+ ret, out = quiet_git(['submodule', 'status', self.dirname], self.subdir_root)
+ if not ret:
+ return False
+ # Submodule has not been added, add it
+ if out.startswith('+'):
+ mlog.warning('git submodule might be out of date')
+ return True
+ elif out.startswith('U'):
+ raise WrapException('git submodule has merge conflicts')
+ # Submodule exists, but is deinitialized or wasn't initialized
+ elif out.startswith('-'):
+ if verbose_git(['submodule', 'update', '--init', self.dirname], self.subdir_root):
+ return True
+ raise WrapException('git submodule failed to init')
+ # Submodule looks fine, but maybe it wasn't populated properly. Do a checkout.
+ elif out.startswith(' '):
+ verbose_git(['checkout', '.'], self.dirname)
+ # Even if checkout failed, try building it anyway and let the user
+ # handle any problems manually.
+ return True
+ elif out == '':
+ # It is not a submodule, just a folder that exists in the main repository.
+ return False
+ m = 'Unknown git submodule output: {!r}'
+ raise WrapException(m.format(out))
+
+ def get_file(self) -> None:
+ path = self.get_file_internal('source')
+ extract_dir = self.subdir_root
+ # Some upstreams ship packages that do not have a leading directory.
+ # Create one for them.
+ if 'lead_directory_missing' in self.wrap.values:
+ os.mkdir(self.dirname)
+ extract_dir = self.dirname
+ shutil.unpack_archive(path, extract_dir)
+
+ def get_git(self) -> None:
+ if not GIT:
+ raise WrapException('Git program not found.')
+ revno = self.wrap.get('revision')
+ is_shallow = False
+ depth_option = [] # type: T.List[str]
+ if self.wrap.values.get('depth', '') != '':
+ is_shallow = True
+ depth_option = ['--depth', self.wrap.values.get('depth')]
+ # For some reason git only allows commit ids to be fetched shallowly with fetch, not with clone
+ if is_shallow and self.is_git_full_commit_id(revno):
+ # git doesn't support directly cloning shallowly for commits,
+ # so we follow https://stackoverflow.com/a/43136160
+ verbose_git(['init', self.directory], self.subdir_root, check=True)
+ verbose_git(['remote', 'add', 'origin', self.wrap.get('url')], self.dirname, check=True)
+ revno = self.wrap.get('revision')
+ verbose_git(['fetch', *depth_option, 'origin', revno], self.dirname, check=True)
+ verbose_git(['checkout', revno, '--'], self.dirname, check=True)
+ if self.wrap.values.get('clone-recursive', '').lower() == 'true':
+ verbose_git(['submodule', 'update', '--init', '--checkout',
+ '--recursive', *depth_option], self.dirname, check=True)
+ push_url = self.wrap.values.get('push-url')
+ if push_url:
+ verbose_git(['remote', 'set-url', '--push', 'origin', push_url], self.dirname, check=True)
+ else:
+ if not is_shallow:
+ verbose_git(['clone', self.wrap.get('url'), self.directory], self.subdir_root, check=True)
+ if revno.lower() != 'head':
+ if not verbose_git(['checkout', revno, '--'], self.dirname):
+ verbose_git(['fetch', self.wrap.get('url'), revno], self.dirname, check=True)
+ verbose_git(['checkout', revno, '--'], self.dirname, check=True)
+ else:
+ verbose_git(['clone', *depth_option, '--branch', revno, self.wrap.get('url'),
+ self.directory], self.subdir_root, check=True)
+ if self.wrap.values.get('clone-recursive', '').lower() == 'true':
+ verbose_git(['submodule', 'update', '--init', '--checkout', '--recursive', *depth_option],
+ self.dirname, check=True)
+ push_url = self.wrap.values.get('push-url')
+ if push_url:
+ verbose_git(['remote', 'set-url', '--push', 'origin', push_url], self.dirname, check=True)
+
+ def is_git_full_commit_id(self, revno: str) -> bool:
+ result = False
+ if len(revno) in (40, 64): # 40 for sha1, 64 for upcoming sha256
+ result = all(ch in '0123456789AaBbCcDdEeFf' for ch in revno)
+ return result
+
+ def get_hg(self) -> None:
+ revno = self.wrap.get('revision')
+ hg = shutil.which('hg')
+ if not hg:
+ raise WrapException('Mercurial program not found.')
+ subprocess.check_call([hg, 'clone', self.wrap.get('url'),
+ self.directory], cwd=self.subdir_root)
+ if revno.lower() != 'tip':
+ subprocess.check_call([hg, 'checkout', revno],
+ cwd=self.dirname)
+
+ def get_svn(self) -> None:
+ revno = self.wrap.get('revision')
+ svn = shutil.which('svn')
+ if not svn:
+ raise WrapException('SVN program not found.')
+ subprocess.check_call([svn, 'checkout', '-r', revno, self.wrap.get('url'),
+ self.directory], cwd=self.subdir_root)
+
+ def get_data(self, urlstring: str) -> T.Tuple[str, str]:
+ blocksize = 10 * 1024
+ h = hashlib.sha256()
+ tmpfile = tempfile.NamedTemporaryFile(mode='wb', dir=self.cachedir, delete=False)
+ url = urllib.parse.urlparse(urlstring)
+ if url.hostname and url.hostname.endswith(WHITELIST_SUBDOMAIN):
+ resp = open_wrapdburl(urlstring)
+ elif WHITELIST_SUBDOMAIN in urlstring:
+ raise WrapException(f'{urlstring} may be a WrapDB-impersonating URL')
+ else:
+ try:
+ req = urllib.request.Request(urlstring, headers={'User-Agent': f'mesonbuild/{coredata.version}'})
+ resp = urllib.request.urlopen(req, timeout=REQ_TIMEOUT)
+ except urllib.error.URLError as e:
+ mlog.log(str(e))
+ raise WrapException(f'could not get {urlstring}; is the internet available?')
+ with contextlib.closing(resp) as resp:
+ try:
+ dlsize = int(resp.info()['Content-Length'])
+ except TypeError:
+ dlsize = None
+ if dlsize is None:
+ print('Downloading file of unknown size.')
+ while True:
+ block = resp.read(blocksize)
+ if block == b'':
+ break
+ h.update(block)
+ tmpfile.write(block)
+ hashvalue = h.hexdigest()
+ return hashvalue, tmpfile.name
+ sys.stdout.flush()
+ progress_bar = ProgressBar(bar_type='download', total=dlsize,
+ desc='Downloading')
+ while True:
+ block = resp.read(blocksize)
+ if block == b'':
+ break
+ h.update(block)
+ tmpfile.write(block)
+ progress_bar.update(len(block))
+ progress_bar.close()
+ hashvalue = h.hexdigest()
+ return hashvalue, tmpfile.name
+
+ def check_hash(self, what: str, path: str, hash_required: bool = True) -> None:
+ if what + '_hash' not in self.wrap.values and not hash_required:
+ return
+ expected = self.wrap.get(what + '_hash').lower()
+ h = hashlib.sha256()
+ with open(path, 'rb') as f:
+ h.update(f.read())
+ dhash = h.hexdigest()
+ if dhash != expected:
+ raise WrapException(f'Incorrect hash for {what}:\n {expected} expected\n {dhash} actual.')
+
+ def download(self, what: str, ofname: str, fallback: bool = False) -> None:
+ self.check_can_download()
+ srcurl = self.wrap.get(what + ('_fallback_url' if fallback else '_url'))
+ mlog.log('Downloading', mlog.bold(self.packagename), what, 'from', mlog.bold(srcurl))
+ try:
+ dhash, tmpfile = self.get_data(srcurl)
+ expected = self.wrap.get(what + '_hash').lower()
+ if dhash != expected:
+ os.remove(tmpfile)
+ raise WrapException(f'Incorrect hash for {what}:\n {expected} expected\n {dhash} actual.')
+ except WrapException:
+ if not fallback:
+ if what + '_fallback_url' in self.wrap.values:
+ return self.download(what, ofname, fallback=True)
+ mlog.log('A fallback URL could be specified using',
+ mlog.bold(what + '_fallback_url'), 'key in the wrap file')
+ raise
+ os.rename(tmpfile, ofname)
+
+ def get_file_internal(self, what: str) -> str:
+ filename = self.wrap.get(what + '_filename')
+ if what + '_url' in self.wrap.values:
+ cache_path = os.path.join(self.cachedir, filename)
+
+ if os.path.exists(cache_path):
+ self.check_hash(what, cache_path)
+ mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.')
+ return cache_path
+
+ if not os.path.isdir(self.cachedir):
+ os.mkdir(self.cachedir)
+ self.download(what, cache_path)
+ return cache_path
+ else:
+ from ..interpreterbase import FeatureNew
+ FeatureNew(f'Local wrap patch files without {what}_url', '0.55.0').use(self.current_subproject)
+ path = Path(self.wrap.filesdir) / filename
+
+ if not path.exists():
+ raise WrapException(f'File "{path}" does not exist')
+ self.check_hash(what, path.as_posix(), hash_required=False)
+
+ return path.as_posix()
+
+ def apply_patch(self) -> None:
+ if 'patch_filename' in self.wrap.values and 'patch_directory' in self.wrap.values:
+ m = 'Wrap file {!r} must not have both "patch_filename" and "patch_directory"'
+ raise WrapException(m.format(self.wrap.basename))
+ if 'patch_filename' in self.wrap.values:
+ path = self.get_file_internal('patch')
+ try:
+ shutil.unpack_archive(path, self.subdir_root)
+ except Exception:
+ with tempfile.TemporaryDirectory() as workdir:
+ shutil.unpack_archive(path, workdir)
+ self.copy_tree(workdir, self.subdir_root)
+ elif 'patch_directory' in self.wrap.values:
+ from ..interpreterbase import FeatureNew
+ FeatureNew('patch_directory', '0.55.0').use(self.current_subproject)
+ patch_dir = self.wrap.values['patch_directory']
+ src_dir = os.path.join(self.wrap.filesdir, patch_dir)
+ if not os.path.isdir(src_dir):
+ raise WrapException(f'patch directory does not exist: {patch_dir}')
+ self.copy_tree(src_dir, self.dirname)
+
+ def copy_tree(self, root_src_dir: str, root_dst_dir: str) -> None:
+ """
+ Copy directory tree. Overwrites also read only files.
+ """
+ for src_dir, _, files in os.walk(root_src_dir):
+ dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
+ if not os.path.exists(dst_dir):
+ os.makedirs(dst_dir)
+ for file_ in files:
+ src_file = os.path.join(src_dir, file_)
+ dst_file = os.path.join(dst_dir, file_)
+ if os.path.exists(dst_file):
+ try:
+ os.remove(dst_file)
+ except PermissionError:
+ os.chmod(dst_file, stat.S_IWUSR)
+ os.remove(dst_file)
+ shutil.copy2(src_file, dst_dir)
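For reference, an illustrative subprojects/foo.wrap exercising the keys parsed above; every value here is hypothetical:

    [wrap-file]
    directory = foo-1.0
    source_url = https://example.com/foo-1.0.tar.gz
    source_filename = foo-1.0.tar.gz
    source_hash = <sha256 of the archive>
    patch_directory = foo

    [provide]
    dependency_names = foo

PackageDefinition reads the first section to get the wrap type ('file' here), get_file_internal() downloads and caches the archive under subprojects/packagecache, apply_patch() overlays subprojects/packagefiles/foo, and the [provide] section feeds Resolver.find_dep_provider().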
diff --git a/meson/mesonbuild/wrap/wraptool.py b/meson/mesonbuild/wrap/wraptool.py
new file mode 100644
index 000000000..222996d46
--- /dev/null
+++ b/meson/mesonbuild/wrap/wraptool.py
@@ -0,0 +1,220 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import sys, os
+import configparser
+import shutil
+import typing as T
+
+from glob import glob
+from urllib.parse import urlparse
+from urllib.request import urlopen
+from .wrap import WrapException
+
+from .. import mesonlib
+
+if T.TYPE_CHECKING:
+ import argparse
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+ subparsers = parser.add_subparsers(title='Commands', dest='command')
+ subparsers.required = True
+
+ p = subparsers.add_parser('list', help='show all available projects')
+ p.set_defaults(wrap_func=list_projects)
+
+ p = subparsers.add_parser('search', help='search the db by name')
+ p.add_argument('name')
+ p.set_defaults(wrap_func=search)
+
+ p = subparsers.add_parser('install', help='install the specified project')
+ p.add_argument('name')
+ p.set_defaults(wrap_func=install)
+
+ p = subparsers.add_parser('update', help='update the project to its newest available release')
+ p.add_argument('name')
+ p.set_defaults(wrap_func=update)
+
+ p = subparsers.add_parser('info', help='show available versions of a project')
+ p.add_argument('name')
+ p.set_defaults(wrap_func=info)
+
+ p = subparsers.add_parser('status', help='show installed and available versions of your projects')
+ p.set_defaults(wrap_func=status)
+
+ p = subparsers.add_parser('promote', help='bring a subsubproject up to the master project')
+ p.add_argument('project_path')
+ p.set_defaults(wrap_func=promote)
+
+def get_releases() -> T.Dict[str, T.Any]:
+ url = urlopen('https://wrapdb.mesonbuild.com/v2/releases.json')
+ return T.cast(T.Dict[str, T.Any], json.loads(url.read().decode()))
+
+def list_projects(options: 'argparse.Namespace') -> None:
+ releases = get_releases()
+ for p in releases.keys():
+ print(p)
+
+def search(options: 'argparse.Namespace') -> None:
+ name = options.name
+ releases = get_releases()
+ for p in releases.keys():
+ if p.startswith(name):
+ print(p)
+
+def get_latest_version(name: str) -> T.Tuple[str, str]:
+ releases = get_releases()
+ info = releases.get(name)
+ if not info:
+ raise WrapException(f'Wrap {name} not found in wrapdb')
+ latest_version = info['versions'][0]
+ version, revision = latest_version.rsplit('-', 1)
+ return version, revision
+
+def install(options: 'argparse.Namespace') -> None:
+ name = options.name
+ if not os.path.isdir('subprojects'):
+ raise SystemExit('Subprojects dir not found. Run this script in your source root directory.')
+ if os.path.isdir(os.path.join('subprojects', name)):
+ raise SystemExit('Subproject directory for this project already exists.')
+ wrapfile = os.path.join('subprojects', name + '.wrap')
+ if os.path.exists(wrapfile):
+ raise SystemExit('Wrap file already exists.')
+ (version, revision) = get_latest_version(name)
+ url = urlopen(f'https://wrapdb.mesonbuild.com/v2/{name}_{version}-{revision}/{name}.wrap')
+ with open(wrapfile, 'wb') as f:
+ f.write(url.read())
+ print(f'Installed {name} version {version} revision {revision}')
+
+def parse_patch_url(patch_url: str) -> T.Tuple[str, str]:
+ u = urlparse(patch_url)
+ if u.netloc != 'wrapdb.mesonbuild.com':
+ raise WrapException(f'URL {patch_url} does not seem to be a wrapdb patch')
+ arr = u.path.strip('/').split('/')
+ if arr[0] == 'v1':
+ # e.g. https://wrapdb.mesonbuild.com/v1/projects/zlib/1.2.11/5/get_zip
+ return arr[-3], arr[-2]
+ elif arr[0] == 'v2':
+ # e.g. https://wrapdb.mesonbuild.com/v2/zlib_1.2.11-5/get_patch
+ tag = arr[-2]
+ name, version = tag.rsplit('_', 1)
+ version, revision = version.rsplit('-', 1)
+ return version, revision
+ else:
+ raise WrapException(f'Invalid wrapdb URL {patch_url}')
+
+def get_current_version(wrapfile: str) -> T.Tuple[str, str, str, str, str]:
+ cp = configparser.ConfigParser(interpolation=None)
+ cp.read(wrapfile)
+ wrap_data = cp['wrap-file']
+ patch_url = wrap_data['patch_url']
+ branch, revision = parse_patch_url(patch_url)
+ return branch, revision, wrap_data['directory'], wrap_data['source_filename'], wrap_data['patch_filename']
+
+def update_wrap_file(wrapfile: str, name: str, new_version: str, new_revision: str) -> None:
+ url = urlopen(f'https://wrapdb.mesonbuild.com/v2/{name}_{new_version}-{new_revision}/{name}.wrap')
+ with open(wrapfile, 'wb') as f:
+ f.write(url.read())
+
+def update(options: 'argparse.Namespace') -> None:
+ name = options.name
+ if not os.path.isdir('subprojects'):
+ raise SystemExit('Subprojects dir not found. Run this command in your source root directory.')
+ wrapfile = os.path.join('subprojects', name + '.wrap')
+ if not os.path.exists(wrapfile):
+ raise SystemExit('Project ' + name + ' is not in use.')
+ (branch, revision, subdir, src_file, patch_file) = get_current_version(wrapfile)
+ (new_branch, new_revision) = get_latest_version(name)
+ if new_branch == branch and new_revision == revision:
+ print('Project ' + name + ' is already up to date.')
+ raise SystemExit
+ update_wrap_file(wrapfile, name, new_branch, new_revision)
+ shutil.rmtree(os.path.join('subprojects', subdir), ignore_errors=True)
+ try:
+ os.unlink(os.path.join('subprojects/packagecache', src_file))
+ except FileNotFoundError:
+ pass
+ try:
+ os.unlink(os.path.join('subprojects/packagecache', patch_file))
+ except FileNotFoundError:
+ pass
+ print(f'Updated {name} version {new_branch} revision {new_revision}')
+
+def info(options: 'argparse.Namespace') -> None:
+ name = options.name
+ releases = get_releases()
+ info = releases.get(name)
+ if not info:
+ raise WrapException(f'Wrap {name} not found in wrapdb')
+ print(f'Available versions of {name}:')
+ for v in info['versions']:
+ print(' ', v)
+
+def do_promotion(from_path: str, spdir_name: str) -> None:
+ if os.path.isfile(from_path):
+ assert(from_path.endswith('.wrap'))
+ shutil.copy(from_path, spdir_name)
+ elif os.path.isdir(from_path):
+ sproj_name = os.path.basename(from_path)
+ outputdir = os.path.join(spdir_name, sproj_name)
+ if os.path.exists(outputdir):
+ raise SystemExit(f'Output dir {outputdir} already exists. Will not overwrite.')
+ shutil.copytree(from_path, outputdir, ignore=shutil.ignore_patterns('subprojects'))
+
+def promote(options: 'argparse.Namespace') -> None:
+ argument = options.project_path
+ spdir_name = 'subprojects'
+ sprojs = mesonlib.detect_subprojects(spdir_name)
+
+ # check if the argument is a full path to a subproject directory or wrap file
+ system_native_path_argument = argument.replace('/', os.sep)
+ for matches in sprojs.values():
+ if system_native_path_argument in matches:
+ do_promotion(system_native_path_argument, spdir_name)
+ return
+
+ # otherwise the argument is just a subproject basename which must be unambiguous
+ if argument not in sprojs:
+ raise SystemExit(f'Subproject {argument} not found in directory tree.')
+ matches = sprojs[argument]
+ if len(matches) > 1:
+ print(f'There is more than one version of {argument} in tree. Please specify which one to promote:\n', file=sys.stderr)
+ for s in matches:
+ print(s, file=sys.stderr)
+ raise SystemExit(1)
+ do_promotion(matches[0], spdir_name)
+
+def status(options: 'argparse.Namespace') -> None:
+ print('Subproject status')
+ for w in glob('subprojects/*.wrap'):
+ name = os.path.basename(w)[:-5]
+ try:
+ (latest_branch, latest_revision) = get_latest_version(name)
+ except Exception:
+ print('', name, 'not available in wrapdb.', file=sys.stderr)
+ continue
+ try:
+ (current_branch, current_revision, _, _, _) = get_current_version(w)
+ except Exception:
+ print('Wrap file not from wrapdb.', file=sys.stderr)
+ continue
+ if current_branch == latest_branch and current_revision == latest_revision:
+ print('', name, f'up to date. Branch {current_branch}, revision {current_revision}.')
+ else:
+ print('', name, f'not up to date. Have {current_branch} {current_revision}, but {latest_branch} {latest_revision} is available.')
+
+def run(options: 'argparse.Namespace') -> int:
+ options.wrap_func(options)
+ return 0
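These subcommands are exposed through the top-level meson binary as `meson wrap`. A typical session might look like the following; the package name, version and revision are illustrative, and the output lines mirror the print statements above:

    $ meson wrap search zlib
    zlib
    $ meson wrap install zlib
    Installed zlib version 1.2.11 revision 5
    $ meson wrap status
    Subproject status
     zlib up to date. Branch 1.2.11, revision 5.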